From 46f7fde7e87ffbde1803887461c4480499fd98c1 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 23 Feb 2021 16:28:22 +0000 Subject: [PATCH 01/21] chore: update deps Uses new aegir --- .aegir.js | 41 +++++++++++++++++++++-------- package.json | 14 ++++++---- scripts/node-globals.js | 4 +++ test/fixtures/repo.js | 2 +- test/migrations/migration-9-test.js | 2 +- 5 files changed, 45 insertions(+), 18 deletions(-) create mode 100644 scripts/node-globals.js diff --git a/.aegir.js b/.aegir.js index 3b826a4..051371d 100644 --- a/.aegir.js +++ b/.aegir.js @@ -1,17 +1,36 @@ 'use strict' -module.exports = { - karma: { - // multi-bucket pinset migrations are slow - browserNoActivityTimeout: 240 * 1000 - }, - webpack: { - node: { - // this is needed until level stops using node buffers in browser code - Buffer: true, +const path = require('path') - // needed by cbor, binary-parse-stream and nofilter - stream: true +const esbuild = { + // this will inject all the named exports from 'node-globals.js' as globals + inject: [path.join(__dirname, 'scripts/node-globals.js')], + plugins: [ + { + name: 'node built ins', // this will make the bundler resolve node builtins to the respective browser polyfill + setup (build) { + build.onResolve({ filter: /^stream$/ }, () => { + return { path: require.resolve('readable-stream') } + }) + build.onResolve({ filter: /^assert$/ }, () => { + return { path: require.resolve('assert') } + }) + build.onResolve({ filter: /^util$/ }, () => { + return { path: require.resolve('util') } + }) + build.onResolve({ filter: /^events$/ }, () => { + return { path: require.resolve('events') } + }) + } } + ] +} + +module.exports = { + test: { + browser: esbuild + }, + build: { + config: esbuild } } diff --git a/package.json b/package.json index 12b456f..d7825bd 100644 --- a/package.json +++ b/package.json @@ -41,13 +41,13 @@ "docs": "aegir docs" }, "dependencies": { - "cbor": "^6.0.1", + "cbor": "^7.0.2", "cids": "^1.0.0", "datastore-core": "^3.0.0", "debug": "^4.1.0", "fnv1a": "^1.0.1", "interface-datastore": "^3.0.3", - "ipld-dag-pb": "^0.20.0", + "ipld-dag-pb": "^0.21.0", "it-length": "^1.0.1", "multibase": "^3.0.0", "multicodec": "^2.0.0", @@ -58,17 +58,21 @@ "varint": "^6.0.0" }, "devDependencies": { - "aegir": "^30.3.0", + "aegir": "^31.0.0", + "assert": "^2.0.0", "datastore-car": "^1.2.0", "datastore-fs": "^3.0.0", - "datastore-level": "^3.0.0", + "datastore-level": "^4.0.0", + "events": "^3.2.0", "it-all": "^1.0.2", "just-safe-set": "^2.1.0", "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", "ncp": "^2.0.0", + "readable-stream": "^3.6.0", "rimraf": "^3.0.0", - "sinon": "^9.0.2" + "sinon": "^9.0.2", + "util": "^0.12.3" }, "engines": { "node": ">=10.0.0", diff --git a/scripts/node-globals.js b/scripts/node-globals.js new file mode 100644 index 0000000..07e0479 --- /dev/null +++ b/scripts/node-globals.js @@ -0,0 +1,4 @@ +// file: node-globals.js +// @ts-nocheck +export const { Buffer } = require('buffer') +export const process = require('process/browser') diff --git a/test/fixtures/repo.js b/test/fixtures/repo.js index 149522c..b8d45f9 100644 --- a/test/fixtures/repo.js +++ b/test/fixtures/repo.js @@ -1,6 +1,6 @@ 'use strict' -const loadFixture = require('aegir/fixtures') +const loadFixture = require('aegir/utils/fixtures') const { CONFIG_KEY, VERSION_KEY, createStore } = require('../../src/utils') async function createRepo (repoOptions, prefix) { diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 3419765..2921314 100644 --- 
a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -9,7 +9,7 @@ const { cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils') const { createStore } = require('../../src/utils') const CID = require('cids') const CarDatastore = require('datastore-car') -const loadFixture = require('aegir/fixtures') +const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') function pinToCid (key, pin) { From 8097641d5936557c34720efa9587e7fbb117cffb Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 23 Feb 2021 16:52:20 +0000 Subject: [PATCH 02/21] chore: fix up config --- .aegir.js | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.aegir.js b/.aegir.js index 051371d..47a415a 100644 --- a/.aegir.js +++ b/.aegir.js @@ -12,23 +12,19 @@ const esbuild = { build.onResolve({ filter: /^stream$/ }, () => { return { path: require.resolve('readable-stream') } }) - build.onResolve({ filter: /^assert$/ }, () => { - return { path: require.resolve('assert') } - }) - build.onResolve({ filter: /^util$/ }, () => { - return { path: require.resolve('util') } - }) - build.onResolve({ filter: /^events$/ }, () => { - return { path: require.resolve('events') } - }) } } ] } +/** @type {import('aegir').PartialOptions} */ module.exports = { test: { - browser: esbuild + browser: { + config: { + buildConfig: esbuild + } + } }, build: { config: esbuild From 6c991d35ed58a370c06015accf5a3c64aa1309b9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 23 Feb 2021 16:55:40 +0000 Subject: [PATCH 03/21] chore: update build config --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0ef92c5..d11a189 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,7 +27,6 @@ jobs: include: - stage: check script: - - npx aegir commitlint --travis - npx aegir dep-check - npm run lint @@ -41,7 +40,7 @@ jobs: name: firefox addons: firefox: latest - script: npx aegir test -t browser -- --browsers FirefoxHeadless + script: npx aegir test -t browser -- --browsers firefox - stage: test name: electron-main From 8e4db3c90d4c7083c586e8e9d449af463a67cb05 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 23 Feb 2021 17:51:16 +0000 Subject: [PATCH 04/21] chore: increase timeout --- .travis.yml | 4 ++-- test/migrations/migration-9-test.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index d11a189..5becca6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,13 +34,13 @@ jobs: name: chrome addons: chrome: stable - script: npx aegir test -t browser + script: npx aegir test -t browser --debug - stage: test name: firefox addons: firefox: latest - script: npx aegir test -t browser -- --browsers firefox + script: npx aegir test -t browser -- --browsers firefox --debug - stage: test name: electron-main diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 2921314..8294f30 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -102,7 +102,7 @@ async function assertPinsetRootIsPresent (datastore, pinset) { module.exports = (setup, cleanup, repoOptions) => { describe('migration 9', function () { - this.timeout(480 * 1000) + this.timeout(1000 * 1000) let dir let datastore From 2b20e870e243624082e83ca5456aae1ba214418b Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 1 Mar 2021 17:50:29 +0000 Subject: [PATCH 05/21] chore: expect uint8arrays --- 
test/migrations/migration-10-test.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/migrations/migration-10-test.js b/test/migrations/migration-10-test.js index b3340fc..93102fc 100644 --- a/test/migrations/migration-10-test.js +++ b/test/migrations/migration-10-test.js @@ -8,11 +8,12 @@ const { createStore } = require('../../src/utils') const migration = require('../../migrations/migration-10') const Key = require('interface-datastore').Key const fromString = require('uint8arrays/from-string') +const equals = require('uint8arrays/equals') const Level5 = require('level-5') const Level6 = require('level-6') const keys = { - CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I: 'hello', + CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I: fromString('hello'), CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ: fromString('derp') } @@ -36,7 +37,7 @@ async function validate (dir, backend, repoOptions) { const key = new Key(`/${name}`) expect(await store.has(key)).to.be.true(`Could not read key ${name}`) - expect(store.get(key)).to.eventually.equal(keys[name], `Could not read value for key ${keys[name]}`) + expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`) } await store.close() From 454291d0fa235ede266e6e580272d9995f1685a7 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 2 Mar 2021 15:54:59 +0000 Subject: [PATCH 06/21] chore: swap cbor for borc --- migrations/migration-9/index.js | 2 +- package.json | 2 +- test/migrations/migration-9-test.js | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index 2d8a9c8..cae54dd 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -2,7 +2,7 @@ const CID = require('cids') const dagpb = require('ipld-dag-pb') -const cbor = require('cbor') +const cbor = require('borc') const multicodec = require('multicodec') const multibase = require('multibase') const pinset = require('./pin-set') diff --git a/package.json b/package.json index d7825bd..414fbd1 100644 --- a/package.json +++ b/package.json @@ -41,7 +41,7 @@ "docs": "aegir docs" }, "dependencies": { - "cbor": "^7.0.2", + "borc": "^2.1.2", "cids": "^1.0.0", "datastore-core": "^3.0.0", "debug": "^4.1.0", diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 8294f30..808053f 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -3,7 +3,7 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const cbor = require('cbor') +const cbor = require('borc') const migration = require('../../migrations/migration-9') const { cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils') const { createStore } = require('../../src/utils') From f8ca6fcebdb969e698cc4daf6091a5065ef73deb Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 2 Mar 2021 15:55:40 +0000 Subject: [PATCH 07/21] chore: remove debug --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5becca6..d11a189 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,13 +34,13 @@ jobs: name: chrome addons: chrome: stable - script: npx aegir test -t browser --debug + script: npx aegir test -t browser - stage: test name: firefox addons: firefox: latest - script: npx aegir test -t browser -- --browsers firefox --debug + script: npx aegir test -t browser -- --browsers firefox - stage: test name: 
electron-main From 5ffbe21c144141dc66eccd4d916465a189a29ac2 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 2 Mar 2021 15:58:45 +0000 Subject: [PATCH 08/21] chore: use correct cli arg --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index d11a189..55a0a5d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,7 +40,7 @@ jobs: name: firefox addons: firefox: latest - script: npx aegir test -t browser -- --browsers firefox + script: npx aegir test -t browser -- --browser firefox - stage: test name: electron-main From f90a683ba32da4041c9164bc466ef740d9ecba95 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 2 Mar 2021 16:24:14 +0000 Subject: [PATCH 09/21] chore: run on ubuntu 18.09 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 55a0a5d..f2fee79 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ language: node_js cache: npm +dist: bionic branches: only: From 8677cc6fbf5667f88f7aef0bb75f829d39e01231 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 16:34:10 +0000 Subject: [PATCH 10/21] chore: swap borc for cborg and add types --- .aegir.js | 12 ++++ migrations/index.js | 5 ++ migrations/migration-10/index.js | 99 +++++++++++++++++++++++------ migrations/migration-8/index.js | 41 ++++++++---- migrations/migration-9/index.js | 58 +++++++++++------ migrations/migration-9/pin-set.js | 64 +++++++++++++++++-- migrations/migration-9/utils.js | 3 + package.json | 17 +++-- src/errors.js | 33 +++++++--- src/index.js | 57 +++++++++-------- src/repo/init.js | 10 ++- src/repo/lock-memory.js | 7 +- src/repo/lock.js | 16 +++-- src/repo/version.js | 25 +++----- src/types.d.ts | 11 ++++ src/utils.js | 26 +++++++- test/fixtures/generate-car-files.js | 15 ++--- test/migrations/migration-9-test.js | 29 +++++++-- tsconfig.json | 10 +++ 19 files changed, 397 insertions(+), 141 deletions(-) create mode 100644 src/types.d.ts create mode 100644 tsconfig.json diff --git a/.aegir.js b/.aegir.js index 47a415a..20c3e3e 100644 --- a/.aegir.js +++ b/.aegir.js @@ -12,6 +12,18 @@ const esbuild = { build.onResolve({ filter: /^stream$/ }, () => { return { path: require.resolve('readable-stream') } }) + build.onResolve({ filter: /^multiformats\/hashes\/digest$/ }, () => { + // remove when https://github.com/evanw/esbuild/issues/187 is fixed + return { path: require.resolve('multiformats/hashes/digest') } + }) + build.onResolve({ filter: /^multiformats$/ }, () => { + // remove when https://github.com/evanw/esbuild/issues/187 is fixed + return { path: require.resolve('multiformats') } + }) + build.onResolve({ filter: /^cborg$/ }, () => { + // remove when https://github.com/evanw/esbuild/issues/187 is fixed + return { path: require.resolve('cborg') } + }) } } ] diff --git a/migrations/index.js b/migrations/index.js index 91b83df..3913921 100644 --- a/migrations/index.js +++ b/migrations/index.js @@ -1,8 +1,13 @@ 'use strict' +/** + * @type {import('../src/types').Migration} + */ const emptyMigration = { description: 'Empty migration.', + // @ts-ignore migrate: () => {}, + // @ts-ignore revert: () => {}, empty: true } diff --git a/migrations/migration-10/index.js b/migrations/migration-10/index.js index 3a96154..b91f084 100644 --- a/migrations/migration-10/index.js +++ b/migrations/migration-10/index.js @@ -4,10 +4,24 @@ const { createStore, findLevelJs } = require('../../src/utils') -const { Key } = require('interface-datastore') const 
fromString = require('uint8arrays/from-string') const toString = require('uint8arrays/to-string') +/** + * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback + * + * @typedef {{ type: 'del', key: string | Uint8Array } | { type: 'put', key: string | Uint8Array, value: Uint8Array }} Operation + * @typedef {function (string, Uint8Array): Operation[]} UpgradeFunction + * @typedef {function (Uint8Array, Uint8Array): Operation[]} DowngradeFunction + */ + + /** + * @param {string} name + * @param {Datastore} store + * @param {(message: string) => void} onProgress + */ async function keysToBinary (name, store, onProgress = () => {}) { let db = findLevelJs(store) @@ -20,14 +34,24 @@ async function keysToBinary (name, store, onProgress = () => {}) { onProgress(`Upgrading ${name}`) - await withEach(db, (key, value) => { + /** + * @type {UpgradeFunction} + */ + const upgrade = (key, value) => { return [ { type: 'del', key: key }, { type: 'put', key: fromString(key), value: value } ] - }) + } + + await withEach(db, upgrade) } + /** + * @param {string} name + * @param {Datastore} store + * @param {(message: string) => void} onProgress + */ async function keysToStrings (name, store, onProgress = () => {}) { let db = findLevelJs(store) @@ -40,14 +64,26 @@ async function keysToStrings (name, store, onProgress = () => {}) { onProgress(`Downgrading ${name}`) - await withEach(db, (key, value) => { + /** + * @type {DowngradeFunction} + */ + const downgrade = (key, value) => { return [ { type: 'del', key: key }, { type: 'put', key: toString(key), value: value } ] - }) + } + + await withEach(db, downgrade) } +/** + * + * @param {string} repoPath + * @param {any} repoOptions + * @param {MigrationProgressCallback} onProgress + * @param {*} fn + */ async function process (repoPath, repoOptions, onProgress, fn) { const datastores = Object.keys(repoOptions.storageBackends) .filter(key => repoOptions.storageBackends[key].name === 'LevelDatastore') @@ -63,9 +99,14 @@ async function process (repoPath, repoOptions, onProgress, fn) { await store.open() try { - await fn(name, store, (message) => { - onProgress(parseInt((migrated / datastores.length) * 100), message) - }) + /** + * @param {string} message + */ + const progress = (message) => { + onProgress(Math.round((migrated / datastores.length) * 100), message) + } + + await fn(name, store, progress) } finally { migrated++ store.close() @@ -75,6 +116,7 @@ async function process (repoPath, repoOptions, onProgress, fn) { onProgress(100, `Migrated ${datastores.length} dbs`) } +/** @type {Migration} */ module.exports = { version: 10, description: 'Migrates datastore-level keys to binary', @@ -87,23 +129,25 @@ module.exports = { } /** - * @typedef {Uint8Array|string} Key - * @typedef {Uint8Array} Value - * @typedef {{ type: 'del', key: Key } | { type: 'put', key: Key, value: Value }} Operation - * * Uses the upgrade strategy from level-js@5.x.x - note we can't call the `.upgrade` command * directly because it will be removed in level-js@6.x.x and we can't guarantee users will * have migrated by then - e.g. they may jump from level-js@4.x.x straight to level-js@6.x.x * so we have to duplicate the code here. 
* - * @param {import('interface-datastore').Datastore} db - * @param {function (Key, Value): Operation[]} fn + * @param {any} db + * @param {UpgradeFunction | DowngradeFunction} fn + * @return {Promise} */ function withEach (db, fn) { + /** + * @param {Operation[]} operations + * @param {(error?: Error) => void} next + */ function batch (operations, next) { const store = db.store('readwrite') const transaction = store.transaction let index = 0 + /** @type {Error | undefined} */ let error transaction.onabort = () => next(error || transaction.error || new Error('aborted by user')) @@ -132,26 +176,43 @@ function withEach (db, fn) { return new Promise((resolve, reject) => { const it = db.iterator() // raw keys and values only - it._deserializeKey = it._deserializeValue = (data) => data + /** + * @template T + * @param {T} data + */ + const id = (data) => data + it._deserializeKey = it._deserializeValue = id next() function next () { - it.next((err, key, value) => { + /** + * @param {Error | undefined} err + * @param {string | undefined} key + * @param {Uint8Array} value + */ + const handleNext = (err, key, value) => { if (err || key === undefined) { - it.end((err2) => { + /** + * @param {Error | undefined} err2 + */ + const handleEnd = (err2) => { if (err2) { reject(err2) return } resolve() - }) + } + + it.end(handleEnd) return } + // @ts-ignore batch(fn(key, value), next) - }) + } + it.next(handleNext) } }) } diff --git a/migrations/migration-8/index.js b/migrations/migration-8/index.js index f624529..044ba57 100644 --- a/migrations/migration-8/index.js +++ b/migrations/migration-8/index.js @@ -8,6 +8,13 @@ const uint8ArrayToString = require('uint8arrays/to-string') const { createStore } = require('../../src/utils') const length = require('it-length') +/** + * @typedef {import('../../src/types').Migration} Migration + */ + +/** + * @param {Key} key + */ function keyToMultihash (key) { const buf = mb.decode(`b${key.toString().slice(1)}`) @@ -18,11 +25,14 @@ function keyToMultihash (key) { multihash = mb.encode('base32', multihash).slice(1) // Should be uppercase for interop with go - multihash = uint8ArrayToString(multihash).toUpperCase() + const multihashStr = uint8ArrayToString(multihash).toUpperCase() - return new Key(`/${multihash}`, false) + return new Key(`/${multihashStr}`, false) } +/** + * @param {Key} key + */ function keyToCid (key) { const buf = mb.decode(`b${key.toString().slice(1)}`) @@ -32,22 +42,26 @@ function keyToCid (key) { return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false) } +/** + * @param {string} repoPath + * @param {*} repoOptions + * @param {(percent: number, message: string) => void} onProgress + * @param {(key: Key) => Key} keyFunction + */ async function process (repoPath, repoOptions, onProgress, keyFunction) { const blockstore = createStore(repoPath, 'blocks', repoOptions) await blockstore.open() let blockCount - if (onProgress) { - blockCount = await length(blockstore.query({ - keysOnly: true, - filters: [({ key }) => { - const newKey = keyFunction(key) + blockCount = await length(blockstore.query({ + keysOnly: true, + filters: [({ key }) => { + const newKey = keyFunction(key) - return newKey.toString() !== key.toString() - }] - })) - } + return newKey.toString() !== key.toString() + }] + })) try { let counter = 0 @@ -62,9 +76,7 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) { await blockstore.delete(block.key) await blockstore.put(newKey, block.value) - if (onProgress) { - onProgress((counter / blockCount) * 100, 
`Migrated Block from ${block.key} to ${newKey}`) - } + onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`) } } } finally { @@ -72,6 +84,7 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) { } } +/** @type {Migration} */ module.exports = { version: 8, description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32', diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index cae54dd..cb7de7f 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -2,7 +2,8 @@ const CID = require('cids') const dagpb = require('ipld-dag-pb') -const cbor = require('borc') +// @ts-ignore https://github.com/rvagg/cborg/pull/5 +const cbor = require('cborg') const multicodec = require('multicodec') const multibase = require('multibase') const pinset = require('./pin-set') @@ -10,6 +11,19 @@ const { createStore } = require('../../src/utils') const { cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') const length = require('it-length') +/** + * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('multicodec').CodecCode} CodecCode + */ + + /** + * @param {Datastore} blockstore + * @param {Datastore} datastore + * @param {Datastore} pinstore + * @param {MigrationProgressCallback} onProgress + */ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { if (!await datastore.has(PIN_DS_KEY)) { return @@ -22,12 +36,12 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { let counter = 0 let pinCount - if (onProgress) { - pinCount = (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.recursive))) + (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.direct))) - } + pinCount = (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.recursive))) + (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.direct))) for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { counter++ + + /** @type {{ depth: number, version?: CID.CIDVersion, codec?: CodecCode }} */ const pin = { depth: Infinity } @@ -42,13 +56,13 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { await pinstore.put(cidToKey(cid), cbor.encode(pin)) - if (onProgress) { - onProgress((counter / pinCount) * 100, `Migrated recursive pin ${cid}`) - } + onProgress((counter / pinCount) * 100, `Migrated recursive pin ${cid}`) } for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) { counter++ + + /** @type {{ depth: number, version?: CID.CIDVersion, codec?: CodecCode }} */ const pin = { depth: 0 } @@ -70,15 +84,17 @@ async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { await datastore.delete(PIN_DS_KEY) } +/** + * @param {Datastore} blockstore + * @param {Datastore} datastore + * @param {Datastore} pinstore + * @param {MigrationProgressCallback} onProgress + */ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { let recursivePins = [] let directPins = [] let counter = 0 - let pinCount - - if (onProgress) { - pinCount = await length(pinstore.query({ keysOnly: true })) - } + const pinCount = await length(pinstore.query({ keysOnly: true })) for await (const { key, value } of pinstore.query({})) { counter++ 
@@ -86,15 +102,11 @@ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().split('/').pop())) if (pin.depth === 0) { - if (onProgress) { - onProgress((counter / pinCount) * 100, `Reverted direct pin ${cid}`) - } + onProgress((counter / pinCount) * 100, `Reverted direct pin ${cid}`) directPins.push(cid) } else { - if (onProgress) { - onProgress((counter / pinCount) * 100, `Reverted recursive pin ${cid}`) - } + onProgress((counter / pinCount) * 100, `Reverted recursive pin ${cid}`) recursivePins.push(cid) } @@ -107,13 +119,18 @@ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { ]) const buf = pinRoot.serialize() const cid = await dagpb.util.cid(buf, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256, + cidVersion: 0 }) await blockstore.put(cidToKey(cid), buf) await datastore.put(PIN_DS_KEY, cid.multihash) } +/** + * @param {string} repoPath + * @param {*} repoOptions + * @param {MigrationProgressCallback} onProgress + * @param {*} fn + */ async function process (repoPath, repoOptions, onProgress, fn) { const blockstore = createStore(repoPath, 'blocks', repoOptions) const datastore = createStore(repoPath, 'datastore', repoOptions) @@ -132,6 +149,7 @@ async function process (repoPath, repoOptions, onProgress, fn) { } } +/** @type {Migration} */ module.exports = { version: 9, description: 'Migrates pins to datastore', diff --git a/migrations/migration-9/pin-set.js b/migrations/migration-9/pin-set.js index 3bb7e1f..22ed74d 100644 --- a/migrations/migration-9/pin-set.js +++ b/migrations/migration-9/pin-set.js @@ -1,12 +1,16 @@ 'use strict' const CID = require('cids') +// @ts-ignore const protobuf = require('protons') +// @ts-ignore const fnv1a = require('fnv1a') const varint = require('varint') const dagpb = require('ipld-dag-pb') -const { DAGNode, DAGLink } = dagpb -const multicodec = require('multicodec') +const DAGNode = require('ipld-dag-pb/src/dag-node/dagNode') +const DAGLink = require('ipld-dag-pb/src/dag-link/dagLink') +const multihash = require('multihashing-async').multihash +// @ts-ignore const pbSchema = require('./pin.proto') const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') const uint8ArrayConcat = require('uint8arrays/concat') @@ -17,10 +21,24 @@ const uint8ArrayEquals = require('uint8arrays/equals') const pb = protobuf(pbSchema) +/** + * @typedef {import('interface-datastore').Datastore} Datastore + * + * @typedef {object} Pin + * @property {CID} key + * @property {Uint8Array} [data] + */ + +/** + * @param {Uint8Array | CID} hash + */ function toB58String (hash) { return new CID(hash).toBaseEncodedString() } +/** + * @param {DAGNode} rootNode + */ function readHeader (rootNode) { // rootNode.data should be a buffer of the format: // < varint(headerLength) | header | itemData... 
> @@ -53,6 +71,10 @@ function readHeader (rootNode) { } } +/** + * @param {number} seed + * @param {CID} key + */ function hash (seed, key) { const buffer = new Uint8Array(4) const dataView = new DataView(buffer.buffer) @@ -63,6 +85,11 @@ function hash (seed, key) { return fnv1a(uint8ArrayToString(data)) } +/** + * @param {Datastore} blockstore + * @param {DAGNode} node + * @returns {AsyncGenerator} + */ async function * walkItems (blockstore, node) { const pbh = readHeader(node) let idx = 0 @@ -89,6 +116,11 @@ async function * walkItems (blockstore, node) { } } +/** + * @param {Datastore} blockstore + * @param {DAGNode} rootNode + * @param {string} name + */ async function * loadSet (blockstore, rootNode, name) { const link = rootNode.Links.find(l => l.Name === name) @@ -102,9 +134,17 @@ async function * loadSet (blockstore, rootNode, name) { yield * walkItems(blockstore, node) } +/** + * @param {Datastore} blockstore + * @param {Pin[]} items + */ function storeItems (blockstore, items) { return storePins(items, 0) + /** + * @param {Pin[]} pins + * @param {number} depth + */ async function storePins (pins, depth) { const pbHeader = pb.Set.encode({ version: 1, @@ -148,8 +188,10 @@ function storeItems (blockstore, items) { // (using go-ipfs' "wasteful but simple" approach for consistency) // https://github.com/ipfs/go-ipfs/blob/master/pin/set.go#L57 + /** @type {Pin[][]} */ const bins = pins.reduce((bins, pin) => { const n = hash(depth, pin.key) % DEFAULT_FANOUT + // @ts-ignore bins[n] = n in bins ? bins[n].concat([pin]) : [pin] return bins }, []) @@ -166,11 +208,15 @@ function storeItems (blockstore, items) { return new DAGNode(headerBuf, fanoutLinks) } + /** + * @param {DAGNode} child + * @param {number} binIdx + */ async function storeChild (child, binIdx) { const buf = dagpb.util.serialize(child) const cid = await dagpb.util.cid(buf, { cidVersion: 0, - hashAlg: multicodec.SHA2_256, + hashAlg: multihash.names['sha2-256'] }) await blockstore.put(cidToKey(cid), buf) @@ -179,17 +225,21 @@ function storeItems (blockstore, items) { } } +/** + * @param {Datastore} blockstore + * @param {string} type + * @param {CID[]} cids + */ async function storeSet (blockstore, type, cids) { const rootNode = await storeItems(blockstore, cids.map(cid => { return { - key: cid, - data: null + key: cid } })) - const buf = rootNode.serialize(rootNode) + const buf = rootNode.serialize() const cid = await dagpb.util.cid(buf, { cidVersion: 0, - hashAlg: multicodec.SHA2_256 + hashAlg: multihash.names['sha2-256'] }) await blockstore.put(cidToKey(cid), buf) diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js index a8d6144..68dab62 100644 --- a/migrations/migration-9/utils.js +++ b/migrations/migration-9/utils.js @@ -14,6 +14,9 @@ const PinTypes = { recursive: 'recursive' } +/** + * @param {import('cids')} cid + */ function cidToKey (cid) { return new Key(`/${multibase.encoding('base32upper').encode(cid.multihash)}`) } diff --git a/package.json b/package.json index 414fbd1..41c08ff 100644 --- a/package.json +++ b/package.json @@ -17,16 +17,19 @@ "src", "dist" ], + "types": "./dist/src/index.d.ts", "main": "src/index.js", "browser": { "./src/repo/lock.js": "./src/repo/lock-memory.js", - "datastore-fs": "datastore-level" + "datastore-fs": "datastore-level", + "@ipld/car": "@ipld/car/esm/car-browser.js" }, "repository": { "type": "git", "url": "https://github.com/ipfs/js-ipfs-repo-migrations.git" }, "scripts": { + "prepare": "aegir build --no-bundle", 
"new-migration": "./src/cli.js add", "test": "aegir test", "test:node": "aegir test --target node", @@ -41,7 +44,7 @@ "docs": "aegir docs" }, "dependencies": { - "borc": "^2.1.2", + "cborg": "^1.0.4", "cids": "^1.0.0", "datastore-core": "^3.0.0", "debug": "^4.1.0", @@ -49,8 +52,8 @@ "interface-datastore": "^3.0.3", "ipld-dag-pb": "^0.21.0", "it-length": "^1.0.1", - "multibase": "^3.0.0", - "multicodec": "^2.0.0", + "multibase": "^4.0.1", + "multicodec": "^3.0.1", "multihashing-async": "^2.0.0", "proper-lockfile": "^4.1.1", "protons": "^2.0.0", @@ -58,13 +61,17 @@ "varint": "^6.0.0" }, "devDependencies": { + "@ipld/car": "^0.1.2", + "@types/debug": "^4.1.5", + "@types/varint": "^6.0.0", "aegir": "^31.0.0", "assert": "^2.0.0", - "datastore-car": "^1.2.0", "datastore-fs": "^3.0.0", "datastore-level": "^4.0.0", "events": "^3.2.0", "it-all": "^1.0.2", + "it-map": "^1.0.5", + "it-parallel-batch": "^1.0.9", "just-safe-set": "^2.1.0", "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", diff --git a/src/errors.js b/src/errors.js index 592f783..1bf3929 100644 --- a/src/errors.js +++ b/src/errors.js @@ -5,6 +5,9 @@ * to revert. */ class NonReversibleMigrationError extends Error { + /** + * @param {string} message + */ constructor (message) { super(message) this.name = 'NonReversibleMigrationError' @@ -12,14 +15,15 @@ class NonReversibleMigrationError extends Error { this.message = message } } - NonReversibleMigrationError.code = 'ERR_NON_REVERSIBLE_MIGRATION' -exports.NonReversibleMigrationError = NonReversibleMigrationError /** * Exception raised when repo is not initialized. */ class NotInitializedRepoError extends Error { + /** + * @param {string} message + */ constructor (message) { super(message) this.name = 'NotInitializedRepoError' @@ -27,14 +31,15 @@ class NotInitializedRepoError extends Error { this.message = message } } - NotInitializedRepoError.code = 'ERR_NOT_INITIALIZED_REPO' -exports.NotInitializedRepoError = NotInitializedRepoError /** * Exception raised when required parameter is not provided. */ class RequiredParameterError extends Error { + /** + * @param {string} message + */ constructor (message) { super(message) this.name = 'RequiredParameterError' @@ -42,14 +47,15 @@ class RequiredParameterError extends Error { this.message = message } } - RequiredParameterError.code = 'ERR_REQUIRED_PARAMETER' -exports.RequiredParameterError = RequiredParameterError /** * Exception raised when value is not valid. */ class InvalidValueError extends Error { + /** + * @param {string} message + */ constructor (message) { super(message) this.name = 'InvalidValueError' @@ -57,14 +63,15 @@ class InvalidValueError extends Error { this.message = message } } - InvalidValueError.code = 'ERR_INVALID_VALUE' -exports.InvalidValueError = InvalidValueError /** * Exception raised when config is not passed. 
*/ class MissingRepoOptionsError extends Error { + /** + * @param {string} message + */ constructor (message) { super(message) this.name = 'MissingRepoOptionsError' @@ -72,6 +79,12 @@ class MissingRepoOptionsError extends Error { this.message = message } } - MissingRepoOptionsError.code = 'ERR_MISSING_REPO_OPTIONS' -exports.MissingRepoOptionsError = MissingRepoOptionsError + +module.exports = { + NonReversibleMigrationError, + NotInitializedRepoError, + RequiredParameterError, + InvalidValueError, + MissingRepoOptionsError +} diff --git a/src/index.js b/src/index.js index 8d9e459..753bd6d 100644 --- a/src/index.js +++ b/src/index.js @@ -1,3 +1,4 @@ +/* eslint complexity: ["error", 27] */ 'use strict' const defaultMigrations = require('../migrations') @@ -7,6 +8,12 @@ const errors = require('./errors') const log = require('debug')('ipfs:repo:migrator') +/** + * @typedef {import('./types').Migration} Migration + * @typedef {import('./types').ProgressCallback} ProgressCallback + * @typedef {import('./types').MigrationProgressCallback} MigrationProgressCallback + */ + exports.getCurrentRepoVersion = repoVersion.getVersion exports.errors = errors @@ -14,8 +21,7 @@ exports.errors = errors * Returns the version of latest migration. * If no migrations are present returns 0. * - * @param {?Array} migrations - Array of migrations to consider. If undefined, the bundled migrations are used. Mainly for testing purpose. - * @returns {int} + * @param {Migration[]} [migrations] - Array of migrations to consider. If undefined, the bundled migrations are used. Mainly for testing purpose. */ function getLatestMigrationVersion (migrations) { migrations = migrations || defaultMigrations @@ -36,14 +42,13 @@ exports.getLatestMigrationVersion = getLatestMigrationVersion * Signature of the progress callback is: function(migrationObject: object, currentMigrationNumber: int, totalMigrationsCount: int) * * @param {string} path - Path to initialized (!) JS-IPFS repo - * @param {Object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. - * @param {int} toVersion - Version to which the repo should be migrated. - * @param {Object?} options - Options for migration - * @param {boolean?} options.ignoreLock - Won't lock the repo for applying the migrations. Use with caution. - * @param {?Function} options.onProgress - Callback which will be called after each executed migration to report progress - * @param {boolean?} options.isDryRun - Allows to simulate the execution of the migrations without any effect. - * @param {?Array} options.migrations - Array of migrations to migrate. If undefined, the bundled migrations are used. Mainly for testing purpose. - * @returns {Promise} + * @param {object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. + * @param {number} toVersion - Version to which the repo should be migrated. + * @param {object} [options] - Options for migration + * @param {boolean} [options.ignoreLock] - Won't lock the repo for applying the migrations. Use with caution. + * @param {ProgressCallback} [options.onProgress] - Callback which will be called after each executed migration to report progress + * @param {boolean} [options.isDryRun] - Allows to simulate the execution of the migrations without any effect. + * @param {Migration[]} [options.migrations] - Array of migrations to perform. If undefined, the bundled migrations are used. 
Mainly for testing purpose. */ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations @@ -97,7 +102,8 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr try { if (!isDryRun) { - let progressCallback + /** @type {MigrationProgressCallback} */ + let progressCallback = () => {} if (onProgress) { // eslint-disable-line max-depth progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message) @@ -123,7 +129,7 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr log('Repo successfully migrated', toVersion !== undefined ? `to version ${toVersion}!` : 'to latest version!') } finally { - if (!isDryRun && !ignoreLock) { + if (!isDryRun && !ignoreLock && lock) { await lock.close() } } @@ -138,14 +144,13 @@ exports.migrate = migrate * Signature of the progress callback is: function(migrationObject: object, currentMigrationNumber: int, totalMigrationsCount: int) * * @param {string} path - Path to initialized (!) JS-IPFS repo - * @param {Object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. - * @param {int} toVersion - Version to which the repo will be reverted. - * @param {Object?} options - Options for the reversion - * @param {?Function} options.onProgress - Callback which will be called after each reverted migration to report progress - * @param {boolean?} options.isDryRun - Allows to simulate the execution of the reversion without any effects. Make sense to utilize onProgress with this argument. - * @param {boolean?} options.ignoreLock - Won't lock the repo for reverting the migrations. Use with caution. - * @param {?Array} options.migrations - Array of migrations to migrate. If undefined, the bundled migrations are used. Mainly for testing purpose. - * @returns {Promise} + * @param {object} repoOptions - Options that are passed to migrations, that can use them to correctly construct datastore. Options are same like for IPFSRepo. + * @param {number} toVersion - Version to which the repo will be reverted. + * @param {object} [options] - Options for the reversion + * @param {ProgressCallback} [options.onProgress] - Callback which will be called after each reverted migration to report progress + * @param {boolean} [options.isDryRun] - Allows to simulate the execution of the reversion without any effects. Make sense to utilize onProgress with this argument. + * @param {boolean} [options.ignoreLock] - Won't lock the repo for reverting the migrations. Use with caution. + * @param {Migration[]} [options.migrations] - Array of migrations to migrate. If undefined, the bundled migrations are used. Mainly for testing purpose. 
*/ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations @@ -200,7 +205,8 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro try { if (!isDryRun) { - let progressCallback + /** @type {MigrationProgressCallback} */ + let progressCallback = () => {} if (onProgress) { // eslint-disable-line max-depth progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message) @@ -226,7 +232,7 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro log(`All migrations successfully reverted to version ${toVersion}!`) } finally { - if (!isDryRun && !ignoreLock) { + if (!isDryRun && !ignoreLock && lock) { await lock.close() } } @@ -237,11 +243,10 @@ exports.revert = revert /** * Function checks if all migrations in given range are available. * - * @param {Array} migrations - * @param {int} fromVersion - * @param {int} toVersion + * @param {Migration[]} migrations + * @param {number} fromVersion + * @param {number} toVersion * @param {boolean} checkReversibility - Will additionally checks if all the migrations in the range are reversible - * @returns {void} */ function verifyAvailableMigrations (migrations, fromVersion, toVersion, checkReversibility = false) { let migrationCounter = 0 diff --git a/src/repo/init.js b/src/repo/init.js index edcf184..ad0b4c1 100644 --- a/src/repo/init.js +++ b/src/repo/init.js @@ -4,7 +4,11 @@ const log = require('debug')('ipfs:repo:migrator:repo:init') const { CONFIG_KEY, VERSION_KEY, createStore } = require('../utils') const { MissingRepoOptionsError } = require('../errors') -exports.isRepoInitialized = async function isRepoInitialized (path, repoOptions) { +/** + * @param {string} path + * @param {any} repoOptions + */ +async function isRepoInitialized (path, repoOptions) { if (!repoOptions) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } @@ -31,3 +35,7 @@ exports.isRepoInitialized = async function isRepoInitialized (path, repoOptions) } } } + +module.exports = { + isRepoInitialized +} diff --git a/src/repo/lock-memory.js b/src/repo/lock-memory.js index 442367c..1ce232b 100644 --- a/src/repo/lock-memory.js +++ b/src/repo/lock-memory.js @@ -3,14 +3,17 @@ const debug = require('debug') const log = debug('ipfs:repo:migrator:repo_mem_lock') const lockFile = 'repo.lock' + +/** + * @type {Record} + */ const LOCKS = {} /** * Lock the repo in the given dir and for given repo version. * - * @param {int} version + * @param {number} version * @param {string} dir - * @returns {Promise} */ exports.lock = async function lock (version, dir) { // eslint-disable-line require-await const file = dir + '/' + lockFile diff --git a/src/repo/lock.js b/src/repo/lock.js index 245dc96..9d96c4a 100644 --- a/src/repo/lock.js +++ b/src/repo/lock.js @@ -1,22 +1,22 @@ 'use strict' const debug = require('debug') -const { lock } = require('proper-lockfile') +// @ts-ignore +const { lock: properLock } = require('proper-lockfile') const log = debug('ipfs:repo:migrator:repo_fs_lock') const lockFile = 'repo.lock' /** - * Lock the repo in the given dir and given repo's version. + * Lock the repo in the given dir and given version. 
* - * @param {int} version + * @param {number} version * @param {string} dir - * @returns {Promise} */ -exports.lock = async (version, dir) => { +async function lock (version, dir) { const file = `${dir}/${lockFile}` log('locking %s', file) - const release = await lock(dir, { lockfilePath: file }) + const release = await properLock(dir, { lockfilePath: file }) return { close: () => { log('releasing lock %s', file) @@ -24,3 +24,7 @@ exports.lock = async (version, dir) => { } } } + +module.exports = { + lock +} diff --git a/src/repo/version.js b/src/repo/version.js index 1aeabca..07ab88f 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -6,8 +6,6 @@ const { VERSION_KEY, createStore } = require('../utils') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayToString = require('uint8arrays/to-string') -exports.getVersion = getVersion - /** * Function that has responsibility to retrieve version of repo from its root datastore's instance. * This function needs to be cross-repo-version functional to be able to fetch any version number, @@ -15,7 +13,6 @@ exports.getVersion = getVersion * * @param {string} path * @param {Object} repoOptions - Options used to create a repo, the same as pased to ipfs-repo - * @returns {Promise} */ async function getVersion (path, repoOptions) { if (!(await repoInit.isRepoInitialized(path, repoOptions))) { @@ -29,26 +26,19 @@ async function getVersion (path, repoOptions) { const store = createStore(path, 'root', repoOptions) await store.open() - let version = await store.get(VERSION_KEY) - - if (version instanceof Uint8Array) { - version = uint8ArrayToString(version) + try { + return parseInt(uint8ArrayToString(await store.get(VERSION_KEY))) + } finally { + await store.close() } - - version = parseInt(version) - - await store.close() - - return version } /** * Function for setting a version in cross-repo-version manner. 
* * @param {string} path - * @param {int} version + * @param {number} version * @param {Object} repoOptions - Options used to create a repo, the same as pased to ipfs-repo - * @returns {Promise} */ async function setVersion (path, version, repoOptions) { if (!repoOptions) { @@ -61,4 +51,7 @@ async function setVersion (path, version, repoOptions) { await store.close() } -exports.setVersion = setVersion +module.exports = { + getVersion, + setVersion +} diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 0000000..87704d1 --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,11 @@ + +export type ProgressCallback = (version: number, progress: string, message: string) => void + +export type MigrationProgressCallback = (percent: number, message: string) => void + +export interface Migration { + version: number + description: string + migrate: (repoPath: string, repoOptions: any, onProgress: MigrationProgressCallback) => Promise, + revert: (repoPath: string, repoOptions: any, onProgress: MigrationProgressCallback) => Promise +} diff --git a/src/utils.js b/src/utils.js index d0748b1..854605a 100644 --- a/src/utils.js +++ b/src/utils.js @@ -8,13 +8,16 @@ const core = require('datastore-core') const ShardingStore = core.ShardingDatastore /** - * @typedef {import('interface-datastore').Key} Key * @typedef {import('interface-datastore').Datastore} Datastore */ const CONFIG_KEY = new Key('/config') const VERSION_KEY = new Key('/version') +/** + * @param {string} name + * @param {*} options + */ function getDatastoreAndOptions (name, options) { if (!options || !options.storageBackends) { throw new Error('Please pass storage backend definitions') @@ -43,14 +46,18 @@ function getDatastoreAndOptions (name, options) { * instance in the chain if one exists. 
* * @param {Datastore} store + * @returns {Datastore | undefined} */ function findLevelJs (store) { let db = store + // @ts-ignore while (db.db || db.child) { + // @ts-ignore db = db.db || db.child // `Level` is only present in the browser, in node it is LevelDOWN + // @ts-ignore if (db.type === 'level-js' || db.constructor.name === 'Level') { return db } @@ -61,6 +68,7 @@ function findLevelJs (store) { * @param {Key} key * @param {function (Key): Promise} has * @param {Datastore} store + * @returns {Promise} */ async function hasWithFallback (key, has, store) { const result = await has(key) @@ -80,6 +88,7 @@ async function hasWithFallback (key, has, store) { return new Promise((resolve, reject) => { // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore const req = levelJs.store('readonly').get(key.toString()) req.transaction.onabort = () => { reject(req.transaction.error) @@ -95,6 +104,7 @@ async function hasWithFallback (key, has, store) { * @param {function (Key): Promise} get * @param {function (Key): Promise} has * @param {import('interface-datastore').Datastore} store + * @returns {Promise} */ async function getWithFallback (key, get, has, store) { if (await has(key)) { @@ -112,6 +122,7 @@ async function getWithFallback (key, get, has, store) { return new Promise((resolve, reject) => { // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore const req = levelJs.store('readonly').get(key.toString()) req.transaction.onabort = () => { reject(req.transaction.error) @@ -126,6 +137,12 @@ async function getWithFallback (key, get, has, store) { }) } +/** + * @param {string} location + * @param {string} name + * @param {*} options + * @returns {Datastore} + */ function createStore (location, name, options) { const { StorageBackend, storageOptions } = getDatastoreAndOptions(name, options) @@ -135,6 +152,7 @@ function createStore (location, name, options) { let store = new StorageBackend(location, storageOptions) + // @ts-ignore if (storageOptions.sharding) { store = new ShardingStore(store, new core.shard.NextToLast(2)) } @@ -142,7 +160,13 @@ function createStore (location, name, options) { // necessary since level-js@5 cannot read keys from level-js@4 and earlier const originalGet = store.get.bind(store) const originalHas = store.has.bind(store) + /** + * @param {Key} key + */ store.get = (key) => getWithFallback(key, originalGet, originalHas, store) + /** + * @param {Key} key + */ store.has = (key) => hasWithFallback(key, originalHas, store) return store diff --git a/test/fixtures/generate-car-files.js b/test/fixtures/generate-car-files.js index 19360bf..f44b80e 100644 --- a/test/fixtures/generate-car-files.js +++ b/test/fixtures/generate-car-files.js @@ -10,8 +10,9 @@ const { const { Key } = require('interface-datastore') const PIN_DS_KEY = new Key('/local/pins') const fs = require('fs') -const CarDatastore = require('datastore-car') +const { CarWriter } = require('@ipld/car') const path = require('path') +const { Readable } = require('stream') const TO_PIN = 9000 @@ -97,12 +98,10 @@ const main = async () => { console.info(` root: new CID('${cid}'),`) - const outStream = fs.createWriteStream(path.join(__dirname, fileName)) - const car = await CarDatastore.writeStream(outStream) + const { writer, out } = await CarWriter.create([cid]) + Readable.from(out).pipe(fs.createWriteStream(path.join(__dirname, fileName))) - await car.setRoots(cid) - - await walk(cid, car) + await walk(cid, writer) let pins 
= 0 @@ -113,7 +112,7 @@ const main = async () => { console.info(` pins: ${pins}`) console.info(` }${more ? ',' : ''}`) - await car.close() + await writer.close() } async function walk (cid, car, cids = {}) { @@ -125,7 +124,7 @@ const main = async () => { const block = await ipfs.block.get(cid) - car.put(cid, block.data) + car.put({ cid, bytes: block.data }) const { value: node } = await ipfs.dag.get(cid) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 808053f..79becf8 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -3,14 +3,17 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const cbor = require('borc') +const cbor = require('cborg') const migration = require('../../migrations/migration-9') const { cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils') const { createStore } = require('../../src/utils') const CID = require('cids') -const CarDatastore = require('datastore-car') +const { CarReader } = require('@ipld/car') const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') +const parallelBatch = require('it-parallel-batch') +const map = require('it-map') +const drain = require('it-drain') function pinToCid (key, pin) { const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop()) @@ -75,20 +78,34 @@ const nonDagPbDirectPins = [ ] async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expectedRoot }) { - const car = await CarDatastore.readBuffer(carBuf) + const car = await CarReader.fromBytes(carBuf) const [actualRoot] = await car.getRoots() expect(actualRoot.toString()).to.equal(expectedRoot.toString()) await blockstore.open() - for await (const { key, value } of car.query()) { - await blockstore.put(cidToKey(new CID(key.toString())), value) + const concurrency = 50 + + /** + * @param {CID} cid + * @param {Uint8Array} bytes + */ + const put = (cid, bytes) => { + return blockstore.put(cidToKey(new CID(cid.toString())), bytes) } + // do the puts in parallel batches to make it faster + await drain( + parallelBatch( + map(car.blocks(), ({ cid, bytes }) => () => put(cid, bytes)), + concurrency + ) + ) + await blockstore.close() await datastore.open() - await datastore.put(PIN_DS_KEY, actualRoot.multihash) + await datastore.put(PIN_DS_KEY, actualRoot.multihash.bytes) await datastore.close() } diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..75db3ca --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "./node_modules/aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "migrations" + ] +} From 022e9766621423333fdb81e56a59ff5dbde660d2 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 16:41:48 +0000 Subject: [PATCH 11/21] chore: add missing dep --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index 41c08ff..2fe8669 100644 --- a/package.json +++ b/package.json @@ -70,6 +70,7 @@ "datastore-level": "^4.0.0", "events": "^3.2.0", "it-all": "^1.0.2", + "it-drain": "^1.0.4", "it-map": "^1.0.5", "it-parallel-batch": "^1.0.9", "just-safe-set": "^2.1.0", From 4ab4c1436824c7dc76f2d8c52163b75e812442df Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 17:21:38 +0000 Subject: [PATCH 12/21] chore: remove protons --- migrations/migration-9/pin-set.js | 25 ++-- migrations/migration-9/pin.d.ts | 79 +++++++++++ migrations/migration-9/pin.js | 
210 ++++++++++++++++++++++++++++ migrations/migration-9/pin.proto | 11 ++ migrations/migration-9/pin.proto.js | 19 --- package.json | 7 +- tsconfig.json | 3 + 7 files changed, 324 insertions(+), 30 deletions(-) create mode 100644 migrations/migration-9/pin.d.ts create mode 100644 migrations/migration-9/pin.js create mode 100644 migrations/migration-9/pin.proto delete mode 100644 migrations/migration-9/pin.proto.js diff --git a/migrations/migration-9/pin-set.js b/migrations/migration-9/pin-set.js index 22ed74d..10d2b39 100644 --- a/migrations/migration-9/pin-set.js +++ b/migrations/migration-9/pin-set.js @@ -1,8 +1,14 @@ 'use strict' const CID = require('cids') -// @ts-ignore -const protobuf = require('protons') +const { + ipfs: { + pin: { + Set: PinSet + } + } +} = require('./pin') + // @ts-ignore const fnv1a = require('fnv1a') const varint = require('varint') @@ -10,8 +16,6 @@ const dagpb = require('ipld-dag-pb') const DAGNode = require('ipld-dag-pb/src/dag-node/dagNode') const DAGLink = require('ipld-dag-pb/src/dag-link/dagLink') const multihash = require('multihashing-async').multihash -// @ts-ignore -const pbSchema = require('./pin.proto') const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') const uint8ArrayConcat = require('uint8arrays/concat') const uint8ArrayCompare = require('uint8arrays/compare') @@ -19,8 +23,6 @@ const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') const uint8ArrayEquals = require('uint8arrays/equals') -const pb = protobuf(pbSchema) - /** * @typedef {import('interface-datastore').Datastore} Datastore * @@ -55,7 +57,12 @@ function readHeader (rootNode) { } const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes) - const header = pb.Set.decode(hdrSlice) + const header = PinSet.toObject(PinSet.decode(hdrSlice), { + defaults: false, + arrays: true, + longs: Number, + objects: false + }) if (header.version !== 1) { throw new Error(`Unsupported Set version: ${header.version}`) @@ -146,11 +153,11 @@ function storeItems (blockstore, items) { * @param {number} depth */ async function storePins (pins, depth) { - const pbHeader = pb.Set.encode({ + const pbHeader = PinSet.encode({ version: 1, fanout: DEFAULT_FANOUT, seed: depth - }) + }).finish() const header = varint.encode(pbHeader.length) const headerBuf = uint8ArrayConcat([header, pbHeader]) diff --git a/migrations/migration-9/pin.d.ts b/migrations/migration-9/pin.d.ts new file mode 100644 index 0000000..935f21e --- /dev/null +++ b/migrations/migration-9/pin.d.ts @@ -0,0 +1,79 @@ +import * as $protobuf from "protobufjs"; +/** Namespace ipfs. */ +export namespace ipfs { + + /** Namespace pin. */ + namespace pin { + + /** Properties of a Set. */ + interface ISet { + + /** Set version */ + version?: (number|null); + + /** Set fanout */ + fanout?: (number|null); + + /** Set seed */ + seed?: (number|null); + } + + /** Represents a Set. */ + class Set implements ISet { + + /** + * Constructs a new Set. + * @param [p] Properties to set + */ + constructor(p?: ipfs.pin.ISet); + + /** Set version. */ + public version: number; + + /** Set fanout. */ + public fanout: number; + + /** Set seed. */ + public seed: number; + + /** + * Encodes the specified Set message. Does not implicitly {@link ipfs.pin.Set.verify|verify} messages. 
+ * @param m Set message or plain object to encode + * @param [w] Writer to encode to + * @returns Writer + */ + public static encode(m: ipfs.pin.ISet, w?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Set message from the specified reader or buffer. + * @param r Reader or buffer to decode from + * @param [l] Message length if known beforehand + * @returns Set + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): ipfs.pin.Set; + + /** + * Creates a Set message from a plain object. Also converts values to their respective internal types. + * @param d Plain object + * @returns Set + */ + public static fromObject(d: { [k: string]: any }): ipfs.pin.Set; + + /** + * Creates a plain object from a Set message. Also converts values to other types if specified. + * @param m Set + * @param [o] Conversion options + * @returns Plain object + */ + public static toObject(m: ipfs.pin.Set, o?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Set to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/migrations/migration-9/pin.js b/migrations/migration-9/pin.js new file mode 100644 index 0000000..21b509e --- /dev/null +++ b/migrations/migration-9/pin.js @@ -0,0 +1,210 @@ +/*eslint-disable*/ +"use strict"; + +var $protobuf = require("protobufjs/minimal"); + +// Common aliases +var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + +// Exported root namespace +var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + +$root.ipfs = (function() { + + /** + * Namespace ipfs. + * @exports ipfs + * @namespace + */ + var ipfs = {}; + + ipfs.pin = (function() { + + /** + * Namespace pin. + * @memberof ipfs + * @namespace + */ + var pin = {}; + + pin.Set = (function() { + + /** + * Properties of a Set. + * @memberof ipfs.pin + * @interface ISet + * @property {number|null} [version] Set version + * @property {number|null} [fanout] Set fanout + * @property {number|null} [seed] Set seed + */ + + /** + * Constructs a new Set. + * @memberof ipfs.pin + * @classdesc Represents a Set. + * @implements ISet + * @constructor + * @param {ipfs.pin.ISet=} [p] Properties to set + */ + function Set(p) { + if (p) + for (var ks = Object.keys(p), i = 0; i < ks.length; ++i) + if (p[ks[i]] != null) + this[ks[i]] = p[ks[i]]; + } + + /** + * Set version. + * @member {number} version + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.version = 0; + + /** + * Set fanout. + * @member {number} fanout + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.fanout = 0; + + /** + * Set seed. + * @member {number} seed + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.seed = 0; + + /** + * Encodes the specified Set message. Does not implicitly {@link ipfs.pin.Set.verify|verify} messages. 
+ * @function encode + * @memberof ipfs.pin.Set + * @static + * @param {ipfs.pin.ISet} m Set message or plain object to encode + * @param {$protobuf.Writer} [w] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Set.encode = function encode(m, w) { + if (!w) + w = $Writer.create(); + if (m.version != null && Object.hasOwnProperty.call(m, "version")) + w.uint32(8).uint32(m.version); + if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout")) + w.uint32(16).uint32(m.fanout); + if (m.seed != null && Object.hasOwnProperty.call(m, "seed")) + w.uint32(29).fixed32(m.seed); + return w; + }; + + /** + * Decodes a Set message from the specified reader or buffer. + * @function decode + * @memberof ipfs.pin.Set + * @static + * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from + * @param {number} [l] Message length if known beforehand + * @returns {ipfs.pin.Set} Set + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Set.decode = function decode(r, l) { + if (!(r instanceof $Reader)) + r = $Reader.create(r); + var c = l === undefined ? r.len : r.pos + l, m = new $root.ipfs.pin.Set(); + while (r.pos < c) { + var t = r.uint32(); + switch (t >>> 3) { + case 1: + m.version = r.uint32(); + break; + case 2: + m.fanout = r.uint32(); + break; + case 3: + m.seed = r.fixed32(); + break; + default: + r.skipType(t & 7); + break; + } + } + return m; + }; + + /** + * Creates a Set message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof ipfs.pin.Set + * @static + * @param {Object.<string,*>} d Plain object + * @returns {ipfs.pin.Set} Set + */ + Set.fromObject = function fromObject(d) { + if (d instanceof $root.ipfs.pin.Set) + return d; + var m = new $root.ipfs.pin.Set(); + if (d.version != null) { + m.version = d.version >>> 0; + } + if (d.fanout != null) { + m.fanout = d.fanout >>> 0; + } + if (d.seed != null) { + m.seed = d.seed >>> 0; + } + return m; + }; + + /** + * Creates a plain object from a Set message. Also converts values to other types if specified. + * @function toObject + * @memberof ipfs.pin.Set + * @static + * @param {ipfs.pin.Set} m Set + * @param {$protobuf.IConversionOptions} [o] Conversion options + * @returns {Object.<string,*>} Plain object + */ + Set.toObject = function toObject(m, o) { + if (!o) + o = {}; + var d = {}; + if (o.defaults) { + d.version = 0; + d.fanout = 0; + d.seed = 0; + } + if (m.version != null && m.hasOwnProperty("version")) { + d.version = m.version; + } + if (m.fanout != null && m.hasOwnProperty("fanout")) { + d.fanout = m.fanout; + } + if (m.seed != null && m.hasOwnProperty("seed")) { + d.seed = m.seed; + } + return d; + }; + + /** + * Converts this Set to JSON. 
+ * @function toJSON + * @memberof ipfs.pin.Set + * @instance + * @returns {Object.<string,*>} JSON object + */ + Set.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Set; + })(); + + return pin; + })(); + + return ipfs; +})(); + +module.exports = $root; diff --git a/migrations/migration-9/pin.proto b/migrations/migration-9/pin.proto new file mode 100644 index 0000000..4e1f33f --- /dev/null +++ b/migrations/migration-9/pin.proto @@ -0,0 +1,11 @@ +syntax = "proto2"; + +package ipfs.pin; + +option go_package = "pb"; + +message Set { + optional uint32 version = 1; + optional uint32 fanout = 2; + optional fixed32 seed = 3; +} diff --git a/migrations/migration-9/pin.proto.js b/migrations/migration-9/pin.proto.js deleted file mode 100644 index 8e94fd8..0000000 --- a/migrations/migration-9/pin.proto.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict' - -/** - * Protobuf interface - * from go-ipfs/pin/internal/pb/header.proto - */ -module.exports = ` - syntax = "proto2"; - - package ipfs.pin; - - option go_package = "pb"; - - message Set { - optional uint32 version = 1; - optional uint32 fanout = 2; - optional fixed32 seed = 3; - } -` diff --git a/package.json b/package.json index 2fe8669..3900615 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,10 @@ "url": "https://github.com/ipfs/js-ipfs-repo-migrations.git" }, "scripts": { - "prepare": "aegir build --no-bundle", + "prepare": "run-s prepare:*", + "prepare:proto": "pbjs -t static-module -w commonjs --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o migrations/migration-9/pin.js migrations/migration-9/pin.proto", + "prepare:proto-types": "pbts -o migrations/migration-9/pin.d.ts migrations/migration-9/pin.js", + "prepare:types": "aegir build --no-bundle", "new-migration": "./src/cli.js add", "test": "aegir test", "test:node": "aegir test --target node", @@ -56,7 +59,6 @@ "multicodec": "^3.0.1", "multihashing-async": "^2.0.0", "proper-lockfile": "^4.1.1", - "protons": "^2.0.0", "uint8arrays": "^2.0.5", "varint": "^6.0.0" }, @@ -77,6 +79,7 @@ "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", "ncp": "^2.0.0", + "npm-run-all": "^4.1.5", "readable-stream": "^3.6.0", "rimraf": "^3.0.0", "sinon": "^9.0.2", diff --git a/tsconfig.json b/tsconfig.json index 75db3ca..91b32ca 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -6,5 +6,8 @@ "include": [ "src", "migrations" + ], + "exclude": [ + "migrations/migration-9/pin.js" ] } From 17aeace7be7051dd5908abf0e2059aa1c7020030 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 19:40:55 +0000 Subject: [PATCH 13/21] chore: batch up imported blocks --- package.json | 4 +--- test/migrations/migration-9-test.js | 26 +++++++++----------------- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/package.json b/package.json index 3900615..1c5af30 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "multicodec": "^3.0.1", "multihashing-async": "^2.0.0", "proper-lockfile": "^4.1.1", + "protobufjs": "^6.10.2", "uint8arrays": "^2.0.5", "varint": "^6.0.0" }, @@ -72,9 +73,6 @@ "datastore-level": "^4.0.0", "events": "^3.2.0", "it-all": "^1.0.2", - "it-drain": "^1.0.4", - "it-map": "^1.0.5", - "it-parallel-batch": "^1.0.9", "just-safe-set": "^2.1.0", "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 79becf8..f479007 100644 --- 
a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -11,9 +11,7 @@ const CID = require('cids') const { CarReader } = require('@ipld/car') const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') -const parallelBatch = require('it-parallel-batch') -const map = require('it-map') -const drain = require('it-drain') +const batch = require('it-batch') function pinToCid (key, pin) { const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop()) @@ -86,21 +84,15 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe const concurrency = 50 - /** - * @param {CID} cid - * @param {Uint8Array} bytes - */ - const put = (cid, bytes) => { - return blockstore.put(cidToKey(new CID(cid.toString())), bytes) - } + for await (const b of batch(car.blocks(), concurrency)) { + const batch = blockstore.batch() + + for await (const { cid, bytes } of b) { + batch.put(cidToKey(new CID(cid.toString())), bytes) + } - // do the puts in parallel batches to make it faster - await drain( - parallelBatch( - map(car.blocks(), ({ cid, bytes }) => () => put(cid, bytes)), - concurrency - ) - ) + await batch.commit() + } await blockstore.close() From 567ca55f1ad5d043f37e1e96994da810c4c55385 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 19:43:54 +0000 Subject: [PATCH 14/21] chore: missing dep --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index 1c5af30..ff2b617 100644 --- a/package.json +++ b/package.json @@ -73,6 +73,7 @@ "datastore-level": "^4.0.0", "events": "^3.2.0", "it-all": "^1.0.2", + "it-batch": "^1.0.8", "just-safe-set": "^2.1.0", "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", From c4bd08777c06e9dbcdc2dcb64d58bfbbf4d58972 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 20:01:14 +0000 Subject: [PATCH 15/21] chore: print output to keep ci alive --- test/migrations/migration-9-test.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index f479007..66f863d 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -83,15 +83,22 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe await blockstore.open() const concurrency = 50 + let blocks = 0 for await (const b of batch(car.blocks(), concurrency)) { + const start = Date.now() const batch = blockstore.batch() for await (const { cid, bytes } of b) { batch.put(cidToKey(new CID(cid.toString())), bytes) + blocks++ } await batch.commit() + + const took = Date.now() - start + + console.info('wrote', blocks, 'blocks in', (took / 1000), 's', Math.round(took / blocks), 'ms per block') // eslint-disable-line no-console } await blockstore.close() From 2c32fd1ebf6da42c675ed40362549bdbb99829a3 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 3 Mar 2021 20:11:25 +0000 Subject: [PATCH 16/21] chore: print output to keep ci alive --- test/migrations/migration-9-test.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 66f863d..e80fc8f 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -163,7 +163,9 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins forward', async () => { - await migration.migrate(dir, repoOptions, () => {}) + await migration.migrate(dir, 
repoOptions, (percent, message) => { + console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console + }) await pinstore.open() let migratedDirect = 0 @@ -212,7 +214,9 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir, repoOptions, () => {}) + await migration.revert(dir, repoOptions, (percent, message) => { + console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console + }) await assertPinsetRootIsPresent(datastore, pinset) }) From a4f3d95b50af53a3910286bd11932810eaab67b0 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 4 Mar 2021 07:29:36 +0000 Subject: [PATCH 17/21] chore: skip test in browser that exceeds quota size --- test/migrations/migration-9-test.js | 40 ++++++++++++++--------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index e80fc8f..f4e9159 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -12,28 +12,39 @@ const { CarReader } = require('@ipld/car') const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') const batch = require('it-batch') +const { isBrowser } = require('ipfs-utils/src/env') function pinToCid (key, pin) { const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop()) return new CID(pin.version || 0, pin.codec || 'dag-pb', buf) } -// the test data is generated by the file /test/fixtures/generate-car-files.js -// nb. you need to `npm install ipfs@0.48.1` or below in a place it can `require` -// it from before running it +/** + * The test data is generated by the file /test/fixtures/generate-car-files.js + * nb. 
you need to `npm install ipfs@0.48.1` or below in a place it can `require` + * it from before running it + * + * @type {Record} + */ const pinsets = { 'basic pinset': { car: loadFixture('test/fixtures/pinset-basic.car'), root: new CID('QmeKxgcTtiE1XfvwcVf8wc65GgMmZumEtXK6YJKuvf3VYx'), pins: 31 - }, - 'multiple bucket pinset': { + } +} + +if (!isBrowser) { + // this pinset is too big for browser storage + pinsets['multiple bucket pinset'] = { car: loadFixture('test/fixtures/pinset-multiple-buckets.car'), root: new CID('QmPGd36dodHj1LQtVWK3LcBVkkVWvfXHEwBHnMpN6tu4BD'), // we need at least 8192 pins in order to create a new bucket pins: 9031 } +} else { + navigator.storage.estimate().then(console.log) // eslint-disable-line no-console } const directPins = [ @@ -83,22 +94,15 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe await blockstore.open() const concurrency = 50 - let blocks = 0 - for await (const b of batch(car.blocks(), concurrency)) { - const start = Date.now() + for await (const blocks of batch(car.blocks(), concurrency)) { const batch = blockstore.batch() - for await (const { cid, bytes } of b) { + for await (const { cid, bytes } of blocks) { batch.put(cidToKey(new CID(cid.toString())), bytes) - blocks++ } await batch.commit() - - const took = Date.now() - start - - console.info('wrote', blocks, 'blocks in', (took / 1000), 's', Math.round(took / blocks), 'ms per block') // eslint-disable-line no-console } await blockstore.close() @@ -163,9 +167,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins forward', async () => { - await migration.migrate(dir, repoOptions, (percent, message) => { - console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console - }) + await migration.migrate(dir, repoOptions, () => {}) await pinstore.open() let migratedDirect = 0 @@ -214,9 +216,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir, repoOptions, (percent, message) => { - console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console - }) + await migration.revert(dir, repoOptions, () => {}) await assertPinsetRootIsPresent(datastore, pinset) }) From bca8d35b5652b19e2dbc11b29ce4a03ad4afc273 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 4 Mar 2021 07:53:50 +0000 Subject: [PATCH 18/21] chore: add missing dep --- package.json | 3 ++- test/migrations/migration-9-test.js | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index ff2b617..c608159 100644 --- a/package.json +++ b/package.json @@ -82,7 +82,8 @@ "readable-stream": "^3.6.0", "rimraf": "^3.0.0", "sinon": "^9.0.2", - "util": "^0.12.3" + "util": "^0.12.3", + "wherearewe": "^1.0.0" }, "engines": { "node": ">=10.0.0", diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index f4e9159..2fef93d 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -12,7 +12,7 @@ const { CarReader } = require('@ipld/car') const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') const batch = require('it-batch') -const { isBrowser } = require('ipfs-utils/src/env') +const { isBrowser } = require('wherearewe') function pinToCid (key, pin) { const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop()) From 19b0a1c2ce0e0a13a54865feee9e2731aaadd674 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 4 
Mar 2021 08:12:54 +0000 Subject: [PATCH 19/21] chore: print storage quota details --- test/migrations/migration-9-test.js | 51 ++++++++++++++++++----------- 1 file changed, 32 insertions(+), 19 deletions(-) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 2fef93d..4271589 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -19,32 +19,22 @@ function pinToCid (key, pin) { return new CID(pin.version || 0, pin.codec || 'dag-pb', buf) } -/** - * The test data is generated by the file /test/fixtures/generate-car-files.js - * nb. you need to `npm install ipfs@0.48.1` or below in a place it can `require` - * it from before running it - * - * @type {Record} - */ +// the test data is generated by the file /test/fixtures/generate-car-files.js +// nb. you need to `npm install ipfs@0.48.1` or below in a place it can `require` +// it from before running it const pinsets = { 'basic pinset': { car: loadFixture('test/fixtures/pinset-basic.car'), root: new CID('QmeKxgcTtiE1XfvwcVf8wc65GgMmZumEtXK6YJKuvf3VYx'), pins: 31 - } -} - -if (!isBrowser) { - // this pinset is too big for browser storage - pinsets['multiple bucket pinset'] = { + }, + 'multiple bucket pinset': { car: loadFixture('test/fixtures/pinset-multiple-buckets.car'), root: new CID('QmPGd36dodHj1LQtVWK3LcBVkkVWvfXHEwBHnMpN6tu4BD'), // we need at least 8192 pins in order to create a new bucket pins: 9031 } -} else { - navigator.storage.estimate().then(console.log) // eslint-disable-line no-console } const directPins = [ @@ -94,15 +84,26 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe await blockstore.open() const concurrency = 50 + let blocks = 0 - for await (const blocks of batch(car.blocks(), concurrency)) { + for await (const b of batch(car.blocks(), concurrency)) { + const start = Date.now() const batch = blockstore.batch() - for await (const { cid, bytes } of blocks) { + for await (const { cid, bytes } of b) { batch.put(cidToKey(new CID(cid.toString())), bytes) + blocks++ } await batch.commit() + + const took = Date.now() - start + + console.info('wrote', blocks, 'blocks in', (took / 1000), 's', Math.round(took / blocks), 'ms per block') // eslint-disable-line no-console + + if (isBrowser) { + console.info(await navigator.storage.estimate()) // eslint-disable-line no-console + } } await blockstore.close() @@ -167,7 +168,13 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins forward', async () => { - await migration.migrate(dir, repoOptions, () => {}) + await migration.migrate(dir, repoOptions, (percent, message) => { + console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console + + if (isBrowser) { + navigator.storage.estimate().then(console.info) // eslint-disable-line no-console + } + }) await pinstore.open() let migratedDirect = 0 @@ -216,7 +223,13 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir, repoOptions, () => {}) + await migration.revert(dir, repoOptions, (percent, message) => { + console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console + + if (isBrowser) { + navigator.storage.estimate().then(console.info) // eslint-disable-line no-console + } + }) await assertPinsetRootIsPresent(datastore, pinset) }) From 7e12f6aa8190d00f524839c849d29c176bc2e1ba Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 4 Mar 2021 08:42:44 +0000 Subject: [PATCH 20/21] 
chore: delete indexdb db after tests --- test/browser.js | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/test/browser.js b/test/browser.js index f9207e4..014917e 100644 --- a/test/browser.js +++ b/test/browser.js @@ -37,7 +37,31 @@ const repoOptions = { } } +async function deleteDb (dir) { + return new Promise((resolve) => { + const req = globalThis.indexedDB.deleteDatabase(dir) + + req.onblocked = () => { + console.error(`${dir} in blocked state`) // eslint-disable-line no-console + } + req.onerror = () => { + console.error(`Could not delete ${dir}`) // eslint-disable-line no-console + resolve() + } + req.onsuccess = () => { + resolve() + } + }) +} + async function repoCleanup (dir) { + await deleteDb(dir) + await deleteDb('level-js-' + dir) + + for (const type of ['blocks', 'keys', 'datastore', 'pins']) { + await deleteDb(dir + '/' + type) + await deleteDb('level-js-' + dir + '/' + type) + } } describe('Browser specific tests', () => { From eb2ee9a6a3954897bc67ca348d3943f83da40d03 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 4 Mar 2021 11:41:54 +0000 Subject: [PATCH 21/21] chore: fix browser tests --- package.json | 4 +-- test/browser.js | 4 --- test/migrations/migration-9-test.js | 44 ++++++----------------------- 3 files changed, 9 insertions(+), 43 deletions(-) diff --git a/package.json b/package.json index c608159..1c5af30 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,6 @@ "datastore-level": "^4.0.0", "events": "^3.2.0", "it-all": "^1.0.2", - "it-batch": "^1.0.8", "just-safe-set": "^2.1.0", "level-5": "npm:level@^5.0.0", "level-6": "npm:level@^6.0.0", @@ -82,8 +81,7 @@ "readable-stream": "^3.6.0", "rimraf": "^3.0.0", "sinon": "^9.0.2", - "util": "^0.12.3", - "wherearewe": "^1.0.0" + "util": "^0.12.3" }, "engines": { "node": ">=10.0.0", diff --git a/test/browser.js b/test/browser.js index 014917e..d149fce 100644 --- a/test/browser.js +++ b/test/browser.js @@ -40,10 +40,6 @@ const repoOptions = { async function deleteDb (dir) { return new Promise((resolve) => { const req = globalThis.indexedDB.deleteDatabase(dir) - - req.onblocked = () => { - console.error(`${dir} in blocked state`) // eslint-disable-line no-console - } req.onerror = () => { console.error(`Could not delete ${dir}`) // eslint-disable-line no-console resolve() diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 4271589..37a4d4a 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -11,8 +11,6 @@ const CID = require('cids') const { CarReader } = require('@ipld/car') const loadFixture = require('aegir/utils/fixtures') const multibase = require('multibase') -const batch = require('it-batch') -const { isBrowser } = require('wherearewe') function pinToCid (key, pin) { const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop()) @@ -83,29 +81,15 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe expect(actualRoot.toString()).to.equal(expectedRoot.toString()) await blockstore.open() - const concurrency = 50 - let blocks = 0 + const batch = blockstore.batch() - for await (const b of batch(car.blocks(), concurrency)) { - const start = Date.now() - const batch = blockstore.batch() - - for await (const { cid, bytes } of b) { - batch.put(cidToKey(new CID(cid.toString())), bytes) - blocks++ - } - - await batch.commit() - - const took = Date.now() - start - - console.info('wrote', blocks, 'blocks in', (took / 1000), 's', Math.round(took / blocks), 
'ms per block') // eslint-disable-line no-console - - if (isBrowser) { - console.info(await navigator.storage.estimate()) // eslint-disable-line no-console - } + for await (const { cid, bytes } of car.blocks()) { + // if we don't create a new Uint8Array, IDB writes the whole backing buffer into the db + batch.put(cidToKey(new CID(cid.toString())), new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength)) } + await batch.commit() + await blockstore.close() await datastore.open() @@ -168,13 +152,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins forward', async () => { - await migration.migrate(dir, repoOptions, (percent, message) => { - console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console - - if (isBrowser) { - navigator.storage.estimate().then(console.info) // eslint-disable-line no-console - } - }) + await migration.migrate(dir, repoOptions, () => {}) await pinstore.open() let migratedDirect = 0 @@ -223,13 +201,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir, repoOptions, (percent, message) => { - console.info(Math.round(percent) + '%', message) // eslint-disable-line no-console - - if (isBrowser) { - navigator.storage.estimate().then(console.info) // eslint-disable-line no-console - } - }) + await migration.revert(dir, repoOptions, () => {}) await assertPinsetRootIsPresent(datastore, pinset) })
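A note for readers following the pin-set code added in PATCH 12: the pin-set root node's data begins with a varint length prefix followed by a protobuf-encoded ipfs.pin.Set header, and only then the pin entries. The sketch below is not part of the patch series; it simply round-trips that header using the generated migrations/migration-9/pin.js module and the `varint` dependency already in package.json. It assumes it is run from the repo root, and the fanout and seed values are placeholders for illustration - the real code takes DEFAULT_FANOUT from ./utils and derives the seed from the bucket depth, neither of which is shown in this series.

'use strict'

// Sketch only: mirrors the header framing that storePins() writes and
// readHeader() parses in PATCH 12. The fanout/seed values are illustrative.
const varint = require('varint')
const uint8ArrayConcat = require('uint8arrays/concat')
const { ipfs: { pin: { Set: PinSet } } } = require('./migrations/migration-9/pin')

// encode: protobuf-encode the Set header, then prepend its byte length as a varint
const pbHeader = PinSet.encode({ version: 1, fanout: 256, seed: 0 }).finish()
const headerBuf = uint8ArrayConcat([varint.encode(pbHeader.length), pbHeader])

// decode: read the length varint back, then decode that many bytes as a Set
const hdrLength = varint.decode(headerBuf)
const vBytes = varint.decode.bytes
const header = PinSet.decode(headerBuf.slice(vBytes, vBytes + hdrLength))

console.info(header.version, header.fanout, header.seed) // 1 256 0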
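Similarly, because it-batch only appears mid-series (added in PATCH 14, removed again in PATCH 21 in favour of a single blockstore batch), here is a minimal sketch of its behaviour, assuming only that `it-batch` and `it-all` are installed as in the interim package.json: it re-chunks any sync or async iterable into arrays of at most the requested size, yielding whatever is left over as the final batch.

'use strict'

// Sketch only: it-batch semantics as used by the interim bootstrapBlocks().
// The numbers are arbitrary; it-all just collects the async iterator's output.
const batch = require('it-batch')
const all = require('it-all')

async function demo () {
  const batches = await all(batch([1, 2, 3, 4, 5], 2))
  console.info(batches) // [ [ 1, 2 ], [ 3, 4 ], [ 5 ] ]
}

demo().catch(console.error)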