Skip to content
This repository was archived by the owner on Oct 1, 2021. It is now read-only.

feat: add types #66

Merged
merged 21 commits into from
Mar 4, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 38 additions & 11 deletions .aegir.js
Original file line number Diff line number Diff line change
@@ -1,17 +1,44 @@
'use strict'

module.exports = {
karma: {
// multi-bucket pinset migrations are slow
browserNoActivityTimeout: 240 * 1000
},
webpack: {
node: {
// this is needed until level stops using node buffers in browser code
Buffer: true,
const path = require('path')

// needed by cbor, binary-parse-stream and nofilter
stream: true
/**
 * Shared esbuild configuration used for both the browser test bundle and
 * the distributable build.
 *
 * Injects node globals and resolves bare module specifiers to
 * browser-compatible implementations.
 */
const esbuild = {
  // this will inject all the named exports from 'node-globals.js' as globals
  inject: [path.join(__dirname, 'scripts/node-globals.js')],
  plugins: [
    {
      // make the bundler resolve node builtins to the respective browser polyfill
      name: 'node built ins',
      setup (build) {
        // [filter, module-to-resolve-to] pairs. Every entry except `stream`
        // resolves a specifier to itself as a workaround for
        // https://github.com/evanw/esbuild/issues/187 - remove those when fixed.
        const resolutions = [
          [/^stream$/, 'readable-stream'],
          [/^multiformats\/hashes\/digest$/, 'multiformats/hashes/digest'],
          [/^multiformats$/, 'multiformats'],
          [/^cborg$/, 'cborg']
        ]

        for (const [filter, target] of resolutions) {
          build.onResolve({ filter }, () => ({ path: require.resolve(target) }))
        }
      }
    }
  ]
}

/** @type {import('aegir').PartialOptions} */
module.exports = {
test: {
browser: {
config: {
buildConfig: esbuild
}
}
},
build: {
config: esbuild
}
}
4 changes: 2 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
language: node_js
cache: npm
dist: bionic

branches:
only:
Expand Down Expand Up @@ -27,7 +28,6 @@ jobs:
include:
- stage: check
script:
- npx aegir commitlint --travis
- npx aegir dep-check
- npm run lint

Expand All @@ -41,7 +41,7 @@ jobs:
name: firefox
addons:
firefox: latest
script: npx aegir test -t browser -- --browsers FirefoxHeadless
script: npx aegir test -t browser -- --browser firefox

- stage: test
name: electron-main
Expand Down
5 changes: 5 additions & 0 deletions migrations/index.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
'use strict'

/**
 * A no-op migration used as a placeholder entry in the migrations list.
 *
 * @type {import('../src/types').Migration}
 */
const emptyMigration = {
  description: 'Empty migration.',
  // @ts-ignore presumably the Migration type requires a richer signature; the noop is intentional - TODO confirm against src/types
  migrate: () => {},
  // @ts-ignore same suppression rationale as migrate above
  revert: () => {},
  empty: true
}
Expand Down
99 changes: 80 additions & 19 deletions migrations/migration-10/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,24 @@ const {
createStore,
findLevelJs
} = require('../../src/utils')
const { Key } = require('interface-datastore')
const fromString = require('uint8arrays/from-string')
const toString = require('uint8arrays/to-string')

/**
* @typedef {import('../../src/types').Migration} Migration
* @typedef {import('interface-datastore').Datastore} Datastore
* @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback
*
* @typedef {{ type: 'del', key: string | Uint8Array } | { type: 'put', key: string | Uint8Array, value: Uint8Array }} Operation
* @typedef {function (string, Uint8Array): Operation[]} UpgradeFunction
* @typedef {function (Uint8Array, Uint8Array): Operation[]} DowngradeFunction
*/

/**
* @param {string} name
* @param {Datastore} store
* @param {(message: string) => void} onProgress
*/
async function keysToBinary (name, store, onProgress = () => {}) {
let db = findLevelJs(store)

Expand All @@ -20,14 +34,24 @@ async function keysToBinary (name, store, onProgress = () => {}) {

onProgress(`Upgrading ${name}`)

await withEach(db, (key, value) => {
/**
* @type {UpgradeFunction}
*/
const upgrade = (key, value) => {
return [
{ type: 'del', key: key },
{ type: 'put', key: fromString(key), value: value }
]
})
}

await withEach(db, upgrade)
}

/**
* @param {string} name
* @param {Datastore} store
* @param {(message: string) => void} onProgress
*/
async function keysToStrings (name, store, onProgress = () => {}) {
let db = findLevelJs(store)

Expand All @@ -40,14 +64,26 @@ async function keysToStrings (name, store, onProgress = () => {}) {

onProgress(`Downgrading ${name}`)

await withEach(db, (key, value) => {
/**
* @type {DowngradeFunction}
*/
const downgrade = (key, value) => {
return [
{ type: 'del', key: key },
{ type: 'put', key: toString(key), value: value }
]
})
}

await withEach(db, downgrade)
}

/**
*
* @param {string} repoPath
* @param {any} repoOptions
* @param {MigrationProgressCallback} onProgress
* @param {*} fn
*/
async function process (repoPath, repoOptions, onProgress, fn) {
const datastores = Object.keys(repoOptions.storageBackends)
.filter(key => repoOptions.storageBackends[key].name === 'LevelDatastore')
Expand All @@ -63,9 +99,14 @@ async function process (repoPath, repoOptions, onProgress, fn) {
await store.open()

try {
await fn(name, store, (message) => {
onProgress(parseInt((migrated / datastores.length) * 100), message)
})
/**
* @param {string} message
*/
const progress = (message) => {
onProgress(Math.round((migrated / datastores.length) * 100), message)
}

await fn(name, store, progress)
} finally {
migrated++
store.close()
Expand All @@ -75,6 +116,7 @@ async function process (repoPath, repoOptions, onProgress, fn) {
onProgress(100, `Migrated ${datastores.length} dbs`)
}

/** @type {Migration} */
module.exports = {
version: 10,
description: 'Migrates datastore-level keys to binary',
Expand All @@ -87,23 +129,25 @@ module.exports = {
}

/**
* @typedef {Uint8Array|string} Key
* @typedef {Uint8Array} Value
* @typedef {{ type: 'del', key: Key } | { type: 'put', key: Key, value: Value }} Operation
*
* Uses the upgrade strategy from [email protected] - note we can't call the `.upgrade` command
* directly because it will be removed in [email protected] and we can't guarantee users will
* have migrated by then - e.g. they may jump from [email protected] straight to [email protected]
* so we have to duplicate the code here.
*
* @param {import('interface-datastore').Datastore} db
* @param {function (Key, Value): Operation[]} fn
* @param {any} db
* @param {UpgradeFunction | DowngradeFunction} fn
* @return {Promise<void>}
*/
function withEach (db, fn) {
/**
* @param {Operation[]} operations
* @param {(error?: Error) => void} next
*/
function batch (operations, next) {
const store = db.store('readwrite')
const transaction = store.transaction
let index = 0
/** @type {Error | undefined} */
let error

transaction.onabort = () => next(error || transaction.error || new Error('aborted by user'))
Expand Down Expand Up @@ -132,26 +176,43 @@ function withEach (db, fn) {
return new Promise((resolve, reject) => {
const it = db.iterator()
// raw keys and values only
it._deserializeKey = it._deserializeValue = (data) => data
/**
* @template T
* @param {T} data
*/
const id = (data) => data
it._deserializeKey = it._deserializeValue = id
next()

function next () {
it.next((err, key, value) => {
/**
* @param {Error | undefined} err
* @param {string | undefined} key
* @param {Uint8Array} value
*/
const handleNext = (err, key, value) => {
if (err || key === undefined) {
it.end((err2) => {
/**
* @param {Error | undefined} err2
*/
const handleEnd = (err2) => {
if (err2) {
reject(err2)
return
}

resolve()
})
}

it.end(handleEnd)

return
}

// @ts-ignore
batch(fn(key, value), next)
})
}
it.next(handleNext)
}
})
}
41 changes: 27 additions & 14 deletions migrations/migration-8/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,13 @@ const uint8ArrayToString = require('uint8arrays/to-string')
const { createStore } = require('../../src/utils')
const length = require('it-length')

/**
* @typedef {import('../../src/types').Migration} Migration
*/

/**
* @param {Key} key
*/
function keyToMultihash (key) {
const buf = mb.decode(`b${key.toString().slice(1)}`)

Expand All @@ -18,11 +25,14 @@ function keyToMultihash (key) {
multihash = mb.encode('base32', multihash).slice(1)

// Should be uppercase for interop with go
multihash = uint8ArrayToString(multihash).toUpperCase()
const multihashStr = uint8ArrayToString(multihash).toUpperCase()

return new Key(`/${multihash}`, false)
return new Key(`/${multihashStr}`, false)
}

/**
* @param {Key} key
*/
function keyToCid (key) {
const buf = mb.decode(`b${key.toString().slice(1)}`)

Expand All @@ -32,22 +42,26 @@ function keyToCid (key) {
return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false)
}

/**
* @param {string} repoPath
* @param {*} repoOptions
* @param {(percent: number, message: string) => void} onProgress
* @param {(key: Key) => Key} keyFunction
*/
async function process (repoPath, repoOptions, onProgress, keyFunction) {
const blockstore = createStore(repoPath, 'blocks', repoOptions)
await blockstore.open()

let blockCount

if (onProgress) {
blockCount = await length(blockstore.query({
keysOnly: true,
filters: [({ key }) => {
const newKey = keyFunction(key)
blockCount = await length(blockstore.query({
keysOnly: true,
filters: [({ key }) => {
const newKey = keyFunction(key)

return newKey.toString() !== key.toString()
}]
}))
}
return newKey.toString() !== key.toString()
}]
}))

try {
let counter = 0
Expand All @@ -62,16 +76,15 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) {
await blockstore.delete(block.key)
await blockstore.put(newKey, block.value)

if (onProgress) {
onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`)
}
onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`)
}
}
} finally {
await blockstore.close()
}
}

/** @type {Migration} */
module.exports = {
version: 8,
description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32',
Expand Down
Loading