From 43b44ad99200df846d599f77d1814dd75ffef6f1 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Sat, 15 Aug 2020 08:51:52 +0100
Subject: [PATCH] feat: report migration status

Changes the `onProgress` option to receive feedback on how far each migration
has progressed with a percent complete indicator and a message to show the
user.

If an `onProgress` option is passed, migrations will get a bit slower as we
need to calculate the total volume of work before starting a migration in
order to work out the percent complete.

Also removes datastores from runtime dependencies as these should only ever
be passed in as config.

Fixes #32

BREAKING CHANGES:

- The signature of the `onProgress` callback has changed

---
 README.md                                 | 4 +-
 migrations/migration-8/index.js           | 58 +++++++++-------------
 migrations/migration-9/index.js           | 54 ++++++++++++++++-----
 migrations/migration-9/utils.js           | 26 ----------
 package.json                              | 10 ++--
 src/index.js                              | 59 ++++++++++++++++-------
 src/migration-templates.js                | 12 ++---
 src/repo/init.js                          | 9 +---
 src/repo/version.js                       | 16 ++----
 src/utils.js                              | 59 ++++++++++++++++-------
 test/fixtures/repo.js                     | 27 ++---------
 test/index.spec.js                        | 19 +++++---
 test/init-test.js                         | 34 ++-----------
 test/integration-test.js                  | 29 ++++-------
 test/lock-test.js                         | 4 +-
 test/migrations/index.js                  | 13 ++++-
 test/migrations/migration-8-test.js       | 53 +++++++-------------
 test/migrations/migration-9-test.js       | 58 +++++++++++-----------
 test/node.js                              | 2 +
 test/test-migrations/migration-2/index.js | 39 +++++----------
 test/util.js                              | 11 -----
 test/version-test.js                      | 28 ++---------
 22 files changed, 269 insertions(+), 355 deletions(-)
 delete mode 100644 test/util.js

diff --git a/README.md b/README.md
index 3a720a0..499bf14 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,7 @@ This package is inspired by the [go-ipfs repo migration tool](https://github.com
 - [Usage](#usage)
 - [API](#api)
   - [`.migrate(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise`](#migratepath-repooptions-toversion-ignorelock-onprogress-isdryrun---promisevoid)
-  - [`onProgress(migration, counter, totalMigrations)`](#onprogressmigration-counter-totalmigrations)
+  - [`onProgress(versionFrom, versionTo, percent, message)`](#onprogressversionfrom-versionto-percent-message)
   - [`.revert(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise`](#revertpath-repooptions-toversion-ignorelock-onprogress-isdryrun---promisevoid)
   - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int)
 - [Creating a new migration](#creating-a-new-migration)
@@ -121,7 +121,7 @@ Executes a forward migration to a specific version, or to the latest version if
 * `options.onProgress` (function, optional) - callback that is called after finishing execution of each migration to report progress.
 * `options.isDryRun` (bool, optional) - flag that indicates if it is a dry run that should give the same output as running a migration but without making any actual changes.
 
-#### `onProgress(migration, counter, totalMigrations)`
+#### `onProgress(versionFrom, versionTo, percent, message)`
 
 Signature of the progress callback.
 
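To make the breaking change above concrete, here is a minimal sketch of calling `migrate()` with the new callback shape. It is illustrative only: the repo path is a placeholder, and the `storageBackends` / `storageBackendOptions` shown (datastore-fs and datastore-level, mirroring the test configuration later in this patch) are assumptions about the caller's setup, since the patch removes the bundled datastores and requires backends to be passed in.

```js
'use strict'

// Assumes the package is required by its npm name; storage backends now have
// to be supplied by the caller because they are no longer runtime dependencies.
const migrator = require('ipfs-repo-migrations')
const DatastoreFS = require('datastore-fs')
const DatastoreLevel = require('datastore-level')

const repoOptions = {
  storageBackends: {
    root: DatastoreFS,
    blocks: DatastoreFS,
    datastore: DatastoreLevel,
    keys: DatastoreLevel,
    pins: DatastoreLevel
  },
  storageBackendOptions: {
    root: { extension: '' },
    blocks: { sharding: true, extension: '.data' }
  }
}

async function main () {
  await migrator.migrate('/path/to/repo', repoOptions, migrator.getLatestMigrationVersion(), {
    // New signature: called repeatedly while a migration runs, not once per
    // finished migration. `percent` arrives as a string such as '50.00'
    // because src/index.js formats it with toFixed(2).
    onProgress: (versionFrom, versionTo, percent, message) => {
      console.log(`${versionFrom} -> ${versionTo}: ${percent}% ${message}`)
    }
  })
}

main().catch(console.error)
```

Compared to the old `(migration, counter, totalMigrations)` callback, progress now arrives per unit of work inside each migration rather than once per completed migration.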
diff --git a/migrations/migration-8/index.js b/migrations/migration-8/index.js index 50ee69b..b79b2f4 100644 --- a/migrations/migration-8/index.js +++ b/migrations/migration-8/index.js @@ -2,25 +2,11 @@ const CID = require('cids') const Key = require('interface-datastore').Key -const core = require('datastore-core') -const ShardingStore = core.ShardingDatastore const mb = require('multibase') -const utils = require('../../src/utils') const log = require('debug')('ipfs-repo-migrations:migration-8') const uint8ArrayToString = require('uint8arrays/to-string') - -// This function in js-ipfs-repo defaults to not using sharding -// but the default value of the options.sharding is true hence this -// function defaults to use sharding. -async function maybeWithSharding (filestore, options) { - if (options.sharding === false) { - return filestore - } - - const shard = new core.shard.NextToLast(2) - - return ShardingStore.createOrOpen(filestore, shard) -} +const { createStore } = require('../../src/utils') +const length = require('it-length') function keyToMultihash (key) { const buf = mb.decode(`b${key.toString().slice(1)}`) @@ -46,44 +32,46 @@ function keyToCid (key) { return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false) } -async function process (repoPath, options, keyFunction){ - const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, 'blocks') +async function process (repoPath, repoOptions, onProgress, keyFunction) { + const blockstore = await createStore(repoPath, 'blocks', repoOptions) + await blockstore.open() + + let blockCount - const baseStore = new StorageBackend(`${repoPath}/blocks`, storageOptions) - await baseStore.open() - const store = await maybeWithSharding(baseStore, storageOptions) - await store.open() + if (onProgress) { + blockCount = await length(blockstore.query({ keysOnly: true })) + } try { let counter = 0 - for await (const block of store.query({})) { + for await (const block of blockstore.query({})) { const newKey = keyFunction(block.key) + counter += 1 // If the Key is base32 CIDv0 then there's nothing to do if(newKey.toString() !== block.key.toString()) { - counter += 1 + log(`Migrating Block from ${block.key} to ${newKey}`) + await blockstore.delete(block.key) + await blockstore.put(newKey, block.value) - log(`Migrating Block from ${block.key.toString()} to ${newKey.toString()}`) - await store.delete(block.key) - await store.put(newKey, block.value) + if (onProgress) { + onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`) + } } } - - log(`Changed ${ counter } blocks`) } finally { - await store.close() - await baseStore.close() + await blockstore.close() } } module.exports = { version: 8, description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32', - migrate: (repoPath, options = {}) => { - return process(repoPath, options, keyToMultihash) + migrate: (repoPath, repoOptions, onProgress) => { + return process(repoPath, repoOptions, onProgress, keyToMultihash) }, - revert: (repoPath, options = {}) => { - return process(repoPath, options, keyToCid) + revert: (repoPath, repoOptions, onProgress) => { + return process(repoPath, repoOptions, onProgress, keyToCid) } } diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index d209dd8..279cbfc 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -6,16 +6,21 @@ const cbor = require('cbor') const multicodec = 
require('multicodec') const multibase = require('multibase') const pinset = require('./pin-set') -const { createStore, cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') +const { createStore } = require('../../src/utils') +const { cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') +const length = require('it-length') -async function pinsToDatastore (blockstore, datastore, pinstore) { +async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { const mh = await datastore.get(PIN_DS_KEY) const cid = new CID(mh) - const pinRootBuf = await blockstore.get(cidToKey(cid)) const pinRoot = dagpb.util.deserialize(pinRootBuf) + const pinCount = (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.recursive))) + (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.direct))) + let counter = 0 + for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { + counter++ const pin = { depth: Infinity } @@ -29,9 +34,12 @@ async function pinsToDatastore (blockstore, datastore, pinstore) { } await pinstore.put(cidToKey(cid), cbor.encode(pin)) + + onProgress((counter / pinCount) * 100, `Migrated recursive pin ${cid}`) } for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) { + counter++ const pin = { depth: 0 } @@ -45,27 +53,47 @@ async function pinsToDatastore (blockstore, datastore, pinstore) { } await pinstore.put(cidToKey(cid), cbor.encode(pin)) + + onProgress((counter / pinCount) * 100, `Migrated direct pin ${cid}`) } await blockstore.delete(cidToKey(cid)) await datastore.delete(PIN_DS_KEY) } -async function pinsToDAG (blockstore, datastore, pinstore) { +async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { let recursivePins = [] let directPins = [] + let pinCount + + if (onProgress) { + pinCount = await length(pinstore.query({ keysOnly: true })) + } + + let counter = 0 + for await (const { key, value } of pinstore.query({})) { + counter++ const pin = cbor.decode(value) const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().split('/').pop())) if (pin.depth === 0) { + if (onProgress) { + onProgress((counter / pinCount) * 100, `Reverted direct pin ${cid}`) + } + directPins.push(cid) } else { + if (onProgress) { + onProgress((counter / pinCount) * 100, `Reverted recursive pin ${cid}`) + } + recursivePins.push(cid) } } + onProgress(100, 'Updating pin root') const pinRoot = new dagpb.DAGNode(new Uint8Array(), [ await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins), await pinset.storeSet(blockstore, PinTypes.direct, directPins) @@ -79,17 +107,17 @@ async function pinsToDAG (blockstore, datastore, pinstore) { await datastore.put(PIN_DS_KEY, cid.multihash) } -async function process (repoPath, options, fn) { - const blockstore = await createStore(repoPath, 'blocks', options) - const datastore = await createStore(repoPath, 'datastore', options) - const pinstore = await createStore(repoPath, 'pins', options) +async function process (repoPath, repoOptions, onProgress, fn) { + const blockstore = await createStore(repoPath, 'blocks', repoOptions) + const datastore = await createStore(repoPath, 'datastore', repoOptions) + const pinstore = await createStore(repoPath, 'pins', repoOptions) await blockstore.open() await datastore.open() await pinstore.open() try { - await fn(blockstore, datastore, pinstore) + await fn(blockstore, datastore, pinstore, onProgress) } finally { await pinstore.close() await datastore.close() @@ -100,10 
+128,10 @@ async function process (repoPath, options, fn) { module.exports = { version: 9, description: 'Migrates pins to datastore', - migrate: (repoPath, options = {}) => { - return process(repoPath, options, pinsToDatastore) + migrate: (repoPath, repoOptions, onProgress) => { + return process(repoPath, repoOptions, onProgress, pinsToDatastore) }, - revert: (repoPath, options = {}) => { - return process(repoPath, options, pinsToDAG) + revert: (repoPath, repoOptions, onProgress) => { + return process(repoPath, repoOptions, onProgress, pinsToDAG) } } diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js index 893bba8..a8d6144 100644 --- a/migrations/migration-9/utils.js +++ b/migrations/migration-9/utils.js @@ -1,8 +1,5 @@ 'use strict' -const core = require('datastore-core') -const ShardingStore = core.ShardingDatastore -const utils = require('../../src/utils') const multibase = require('multibase') const { Key } = require('interface-datastore') const multihashes = require('multihashing-async').multihash @@ -21,34 +18,11 @@ function cidToKey (cid) { return new Key(`/${multibase.encoding('base32upper').encode(cid.multihash)}`) } -// This function in js-ipfs-repo defaults to not using sharding -// but the default value of the options.sharding is true hence this -// function defaults to use sharding. -async function maybeWithSharding (filestore, options) { - if (options.sharding === false) { - return filestore - } - - const shard = new core.shard.NextToLast(2) - - return ShardingStore.createOrOpen(filestore, shard) -} - -const createStore = async (location, name, options) => { - const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, name) - - let store = new StorageBackend(`${location}/${name}`, storageOptions) - store = maybeWithSharding(store, storageOptions) - - return store -} - module.exports = { PIN_DS_KEY, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY, PinTypes, - createStore, cidToKey } diff --git a/package.json b/package.json index abdad69..a9f9315 100644 --- a/package.json +++ b/package.json @@ -44,12 +44,11 @@ "cbor": "^5.0.2", "cids": "^1.0.0", "datastore-core": "^2.0.0", - "datastore-fs": "^2.0.0", - "datastore-level": "^2.0.0", "debug": "^4.1.0", "fnv1a": "^1.0.1", "interface-datastore": "^2.0.0", "ipld-dag-pb": "^0.20.0", + "it-length": "0.0.2", "multibase": "^3.0.0", "multicodec": "^2.0.0", "multihashing-async": "^2.0.0", @@ -59,11 +58,10 @@ "varint": "^5.0.0" }, "devDependencies": { - "aegir": "^25.0.0", - "chai": "^4.2.0", - "chai-as-promised": "^7.1.1", + "aegir": "^26.0.0", "datastore-car": "^1.2.0", - "dirty-chai": "^2.0.1", + "datastore-fs": "^2.0.1", + "datastore-level": "^2.0.0", "it-all": "^1.0.2", "just-safe-set": "^2.1.0", "ncp": "^2.0.0", diff --git a/src/index.js b/src/index.js index 6da627b..5b40fba 100644 --- a/src/index.js +++ b/src/index.js @@ -47,7 +47,6 @@ exports.getLatestMigrationVersion = getLatestMigrationVersion */ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations - onProgress = onProgress || (() => {}) if (!path) { throw new errors.RequiredParameterError('Path argument is required!') @@ -79,23 +78,33 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr verifyAvailableMigrations(migrations, currentVersion, toVersion) let lock - if (!isDryRun && !ignoreLock) lock = await repoLock.lock(currentVersion, path) + + if (!isDryRun && !ignoreLock) { + lock = await 
repoLock.lock(currentVersion, path) + } try { - let counter = 0 - const totalMigrations = toVersion - currentVersion for (const migration of migrations) { if (toVersion !== undefined && migration.version > toVersion) { break } + if (migration.version <= currentVersion) { continue } - counter++ log(`Migrating version ${migration.version}`) + try { - if (!isDryRun) await migration.migrate(path, repoOptions) + if (!isDryRun) { + let progressCallback + + if (onProgress) { // eslint-disable-line max-depth + progressCallback = (percent, message) => onProgress(currentVersion, migration.version, percent.toFixed(2), message) + } + + await migration.migrate(path, repoOptions, progressCallback) + } } catch (e) { const lastSuccessfullyMigratedVersion = migration.version - 1 log(`An exception was raised during execution of migration. Setting the repo's version to last successfully migrated version: ${lastSuccessfullyMigratedVersion}`) @@ -105,14 +114,18 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr throw e } - onProgress(migration, counter, totalMigrations) // Reports on migration process log(`Migrating to version ${migration.version} finished`) } - if (!isDryRun) await repoVersion.setVersion(path, toVersion || getLatestMigrationVersion(migrations), repoOptions) + if (!isDryRun) { + await repoVersion.setVersion(path, toVersion || getLatestMigrationVersion(migrations), repoOptions) + } + log('Repo successfully migrated ', toVersion !== undefined ? `to version ${toVersion}!` : 'to latest version!') } finally { - if (!isDryRun && !ignoreLock) await lock.close() + if (!isDryRun && !ignoreLock) { + await lock.close() + } } } @@ -136,7 +149,6 @@ exports.migrate = migrate */ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) { migrations = migrations || defaultMigrations - onProgress = onProgress || (() => {}) if (!path) { throw new errors.RequiredParameterError('Path argument is required!') @@ -155,6 +167,7 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro } const currentVersion = await repoVersion.getVersion(path, repoOptions) + if (currentVersion === toVersion) { log('Nothing to revert.') return @@ -170,10 +183,10 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro if (!isDryRun && !ignoreLock) lock = await repoLock.lock(currentVersion, path) log(`Reverting from version ${currentVersion} to ${toVersion}`) + try { - let counter = 0 - const totalMigrations = currentVersion - toVersion const reversedMigrationArray = migrations.slice().reverse() + for (const migration of reversedMigrationArray) { if (migration.version <= toVersion) { break @@ -183,10 +196,18 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro continue } - counter++ log(`Reverting migration version ${migration.version}`) + try { - if (!isDryRun) await migration.revert(path, repoOptions) + if (!isDryRun) { + let progressCallback + + if (onProgress) { // eslint-disable-line max-depth + progressCallback = (percent, message) => onProgress(currentVersion, migration.version, percent.toFixed(2), message) + } + + await migration.revert(path, repoOptions, progressCallback) + } } catch (e) { const lastSuccessfullyRevertedVersion = migration.version log(`An exception was raised during execution of migration. 
Setting the repo's version to last successfully reverted version: ${lastSuccessfullyRevertedVersion}`) @@ -196,14 +217,18 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro throw e } - onProgress(migration, counter, totalMigrations) // Reports on migration process log(`Reverting to version ${migration.version} finished`) } - if (!isDryRun) await repoVersion.setVersion(path, toVersion, repoOptions) + if (!isDryRun) { + await repoVersion.setVersion(path, toVersion, repoOptions) + } + log(`All migrations successfully reverted to version ${toVersion}!`) } finally { - if (!isDryRun && !ignoreLock) await lock.close() + if (!isDryRun && !ignoreLock) { + await lock.close() + } } } diff --git a/src/migration-templates.js b/src/migration-templates.js index 3532038..1c2cbb3 100644 --- a/src/migration-templates.js +++ b/src/migration-templates.js @@ -3,12 +3,11 @@ module.exports = { indexJs: `'use strict' -const utils = require('../../src/utils') +const { createStore } = require('../../src/utils') const log = require('debug')('repo-migrations:migration-{{version}}') -async function migrate(repoPath, repoOptions, isBrowser) { - const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(repoOptions, 'root') - const store = new StorageBackend(repoPath, storageOptions) +async function migrate(repoPath, { repoOptions, onProgress }) { + const store = await createStore(repoPath, 'root', repoOptions) store.open() try { @@ -18,9 +17,8 @@ async function migrate(repoPath, repoOptions, isBrowser) { } } -async function revert(repoPath, repoOptions, isBrowser) { - const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(repoOptions, 'root') - const store = new StorageBackend(repoPath, storageOptions) +async function revert(repoPath, { repoOptions, onProgress }) { + const store = await createStore(repoPath, 'root', repoOptions) store.open() try { diff --git a/src/repo/init.js b/src/repo/init.js index 400dbb0..176c868 100644 --- a/src/repo/init.js +++ b/src/repo/init.js @@ -1,7 +1,7 @@ 'use strict' const log = require('debug')('repo-migrations:repo:init') -const { CONFIG_KEY, VERSION_KEY, getDatastoreAndOptions } = require('../utils') +const { CONFIG_KEY, VERSION_KEY, createStore } = require('../utils') const { MissingRepoOptionsError } = require('../errors') exports.isRepoInitialized = async function isRepoInitialized (path, repoOptions) { @@ -11,12 +11,7 @@ exports.isRepoInitialized = async function isRepoInitialized (path, repoOptions) let root try { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - root = new StorageBackend(path, storageOptions) + root = await createStore(path, 'root', repoOptions) await root.open() const versionCheck = await root.has(VERSION_KEY) const configCheck = await root.has(CONFIG_KEY) diff --git a/src/repo/version.js b/src/repo/version.js index 22ab720..acc2217 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -2,7 +2,7 @@ const repoInit = require('./init') const { MissingRepoOptionsError, NotInitializedRepoError } = require('../errors') -const { VERSION_KEY, getDatastoreAndOptions } = require('../utils') +const { VERSION_KEY, createStore } = require('../utils') const uint8ArrayFromString = require('uint8arrays/from-string') exports.getVersion = getVersion @@ -25,12 +25,7 @@ async function getVersion (path, repoOptions) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } - const { - StorageBackend, - storageOptions - } = 
getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(path, storageOptions) + const store = await createStore(path, 'root', repoOptions) await store.open() const version = parseInt(await store.get(VERSION_KEY)) @@ -52,12 +47,7 @@ async function setVersion (path, version, repoOptions) { throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') } - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(path, storageOptions) + const store = await createStore(path, 'root', repoOptions) await store.open() await store.put(VERSION_KEY, uint8ArrayFromString(String(version))) await store.close() diff --git a/src/utils.js b/src/utils.js index 62e279d..0f3c853 100644 --- a/src/utils.js +++ b/src/utils.js @@ -1,29 +1,27 @@ 'use strict' -const Datastore = require('datastore-fs') const Key = require('interface-datastore').Key +const core = require('datastore-core') +const ShardingStore = core.ShardingDatastore exports.CONFIG_KEY = new Key('/config') exports.VERSION_KEY = new Key('/version') -exports.getDatastoreAndOptions = function getDatastoreAndOptions (options, key, defaultDatastore = Datastore) { - let StorageBackend, storageBackendOptions - if (options !== undefined && - options.storageBackends !== undefined && - options.storageBackends[key] !== undefined - ) { - StorageBackend = options.storageBackends[key] - } else { - StorageBackend = defaultDatastore +function getDatastoreAndOptions (name, options) { + if (!options || !options.storageBackends) { + throw new Error('Please pass storage backend definitions') } - if (options !== undefined && - options.storageBackendOptions !== undefined && - options.storageBackendOptions[key] !== undefined - ) { - storageBackendOptions = options.storageBackendOptions[key] - } else { - storageBackendOptions = {} + if (!options.storageBackends[name]) { + throw new Error(`Storage backend '${name}' not defined in config`) + } + + const StorageBackend = options.storageBackends[name] + + let storageBackendOptions = {} + + if (options.storageBackendOptions !== undefined && options.storageBackendOptions[name] !== undefined) { + storageBackendOptions = options.storageBackendOptions[name] } return { @@ -32,8 +30,33 @@ exports.getDatastoreAndOptions = function getDatastoreAndOptions (options, key, } } -exports.containsIrreversibleMigration = function containsIrreversibleMigration (from, to, migrations) { +// This function in js-ipfs-repo defaults to not using sharding +// but the default value of the options.sharding is true hence this +// function defaults to use sharding. 
+function maybeWithSharding (store, options) { + if (options.sharding === false) { + return store + } + + const shard = new core.shard.NextToLast(2) + return ShardingStore.createOrOpen(store, shard) +} + +async function createStore (location, name, options) { + const { StorageBackend, storageOptions } = getDatastoreAndOptions(name, options) + let store = new StorageBackend(`${location}/${name}`, storageOptions) + store = await maybeWithSharding(store, storageOptions) + + await store.close() + + return store +} + +function containsIrreversibleMigration (from, to, migrations) { return migrations .filter(migration => migration.version > from && migration.version <= to) .some(migration => migration.revert === undefined) } + +exports.createStore = createStore +exports.containsIrreversibleMigration = containsIrreversibleMigration diff --git a/test/fixtures/repo.js b/test/fixtures/repo.js index 5e19f44..2a65194 100644 --- a/test/fixtures/repo.js +++ b/test/fixtures/repo.js @@ -1,42 +1,25 @@ 'use strict' -const { Buffer } = require('buffer') const loadFixture = require('aegir/fixtures') -const { CONFIG_KEY, VERSION_KEY, getDatastoreAndOptions } = require('../../src/utils') +const { CONFIG_KEY, VERSION_KEY, createStore } = require('../../src/utils') async function createRepo (repoOptions) { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - const date = Date.now().toString() const dir = 'test-repo-for-' + date - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: true - }) + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.close() return dir } async function createAndLoadRepo (repoOptions) { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - const date = Date.now().toString() const dir = 'test-repo-for-' + date - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: true - }) + const store = await createStore(dir, 'root', repoOptions) await store.open() - await store.put(VERSION_KEY, Buffer.from(loadFixture('test/fixtures/test-repo/version'))) - await store.put(CONFIG_KEY, Buffer.from(loadFixture('test/fixtures/test-repo/config'))) + await store.put(VERSION_KEY, loadFixture('test/fixtures/test-repo/version')) + await store.put(CONFIG_KEY, loadFixture('test/fixtures/test-repo/config')) await store.close() diff --git a/test/index.spec.js b/test/index.spec.js index 89960d4..84dd990 100644 --- a/test/index.spec.js +++ b/test/index.spec.js @@ -1,11 +1,8 @@ /* eslint-env mocha */ 'use strict' -const chai = require('chai') -const expect = chai.expect +const { expect } = require('aegir/utils/chai') const sinon = require('sinon') -chai.use(require('chai-as-promised')) -chai.use(require('dirty-chai')) const migrator = require('../src/index') const repoVersion = require('../src/repo/version') @@ -261,11 +258,14 @@ describe('index.js', () => { options.onProgress = sinon.stub() getVersionStub.returns(4) + options.migrations[2].revert = (path, repoOptions, onProgress) => { + onProgress(50, 'hello') + } + await expect(migrator.revert('/some/path', repoOptions, 2, options)) .to.eventually.be.fulfilled() - expect(options.onProgress.getCall(0).calledWith(sinon.match.any, 1, 2)).to.be.true() - expect(options.onProgress.getCall(1).calledWith(sinon.match.any, 2, 2)).to.be.true() + expect(options.onProgress.getCall(0).calledWith(4, 3, '50.00', 'hello')).to.be.true() }) it('should unlock repo when error is thrown', async () => { @@ 
-447,11 +447,14 @@ describe('index.js', () => { const repoOptions = createRepoOptions() getVersionStub.returns(2) + options.migrations[2].migrate = (path, repoOptions, onProgress) => { + onProgress(50, 'hello') + } + await expect(migrator.migrate('/some/path', repoOptions, 4, options)) .to.eventually.be.fulfilled() - expect(options.onProgress.getCall(0).calledWith(sinon.match.any, 1, 2)).to.be.true() - expect(options.onProgress.getCall(1).calledWith(sinon.match.any, 2, 2)).to.be.true() + expect(options.onProgress.getCall(0).calledWith(2, 3, '50.00', 'hello')).to.be.true() }) it('should unlock repo when error is thrown', async () => { diff --git a/test/init-test.js b/test/init-test.js index e1bcc9b..4125c0e 100644 --- a/test/init-test.js +++ b/test/init-test.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const { expect } = require('./util') -const { CONFIG_KEY, VERSION_KEY, getDatastoreAndOptions } = require('../src/utils') +const { expect } = require('aegir/utils/chai') +const { CONFIG_KEY, VERSION_KEY, createStore } = require('../src/utils') const repoInit = require('../src/repo/init') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -17,15 +17,7 @@ module.exports = (setup, cleanup, repoOptions) => { ) it('should return true with valid initialized repo', async () => { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: false - }) + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.put(VERSION_KEY, uint8ArrayFromString('7')) await store.put(CONFIG_KEY, uint8ArrayFromString('config')) @@ -35,15 +27,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should return false with missing version key', async () => { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: false - }) + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.put(CONFIG_KEY, '') await store.close() @@ -52,15 +36,7 @@ module.exports = (setup, cleanup, repoOptions) => { }) it('should return false with missing config key', async () => { - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: false - }) + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.put(VERSION_KEY, '') await store.close() diff --git a/test/integration-test.js b/test/integration-test.js index f5ed59d..262bb25 100644 --- a/test/integration-test.js +++ b/test/integration-test.js @@ -1,11 +1,11 @@ /* eslint-env mocha */ 'use strict' -const { expect } = require('./util') +const { expect } = require('aegir/utils/chai') const migrator = require('../src') const migrations = require('./test-migrations') -const { VERSION_KEY, CONFIG_KEY, getDatastoreAndOptions } = require('../src/utils') +const { VERSION_KEY, CONFIG_KEY, createStore } = require('../src/utils') module.exports = (setup, cleanup, repoOptions) => { let dir @@ -19,16 +19,11 @@ module.exports = (setup, cleanup, repoOptions) => { it('migrate forward', async () => { await migrator.migrate(dir, repoOptions, migrator.getLatestMigrationVersion(migrations), { - migrations: migrations + migrations: migrations, + onProgress: () => {} }) - const { - StorageBackend, - storageOptions - } = 
getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, storageOptions) - + const store = await createStore(dir, 'root', repoOptions) await store.open() const version = await store.get(VERSION_KEY) expect(version.toString()).to.be.equal('2') @@ -41,20 +36,16 @@ module.exports = (setup, cleanup, repoOptions) => { it('revert', async () => { await migrator.migrate(dir, repoOptions, migrator.getLatestMigrationVersion(migrations), { - migrations: migrations + migrations: migrations, + onProgress: () => {} }) await migrator.revert(dir, repoOptions, 1, { - migrations: migrations + migrations: migrations, + onProgress: () => {} }) - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, storageOptions) - + const store = await createStore(dir, 'root', repoOptions) await store.open() const version = await store.get(VERSION_KEY) expect(version.toString()).to.be.equal('1') diff --git a/test/lock-test.js b/test/lock-test.js index bcbc754..700fadb 100644 --- a/test/lock-test.js +++ b/test/lock-test.js @@ -1,13 +1,13 @@ /* eslint-env mocha */ 'use strict' -const { expect } = require('./util') +const { expect } = require('aegir/utils/chai') // When new lock mechanism is introduced in new version don't forget to update // the range (from/to) of the previous version test's description module.exports = (locker, setup, cleanup) => { - describe('version 7 and bellow', () => { + describe('version 7 and below', () => { let dir beforeEach(async () => { dir = await setup() }) diff --git a/test/migrations/index.js b/test/migrations/index.js index 89ebcf2..50c3016 100644 --- a/test/migrations/index.js +++ b/test/migrations/index.js @@ -1,18 +1,23 @@ /* eslint-env mocha */ 'use strict' +const DatastoreFS = require('datastore-fs') const DatastoreLevel = require('datastore-level') const CONFIGURATIONS = [{ name: 'with sharding', options: { storageBackends: { + root: DatastoreFS, + blocks: DatastoreFS, + datastore: DatastoreLevel, + keys: DatastoreLevel, pins: DatastoreLevel }, storageBackendOptions: { root: { sharding: true, - extension: '.data' + extension: '' }, blocks: { sharding: true, @@ -36,12 +41,16 @@ const CONFIGURATIONS = [{ name: 'without sharding', options: { storageBackends: { + root: DatastoreFS, + blocks: DatastoreFS, + datastore: DatastoreLevel, + keys: DatastoreLevel, pins: DatastoreLevel }, storageBackendOptions: { root: { sharding: false, - extension: '.data' + extension: '' }, blocks: { sharding: false, diff --git a/test/migrations/migration-8-test.js b/test/migrations/migration-8-test.js index 8499e69..4b2f280 100644 --- a/test/migrations/migration-8-test.js +++ b/test/migrations/migration-8-test.js @@ -3,11 +3,9 @@ const { expect } = require('aegir/utils/chai') +const { createStore } = require('../../src/utils') const migration = require('../../migrations/migration-8') const Key = require('interface-datastore').Key -const Datastore = require('datastore-fs') -const core = require('datastore-core') -const ShardingStore = core.ShardingDatastore const blocksFixtures = [ ['AFKREIBFG77IKIKDMBDUFDCSPK7H5TE5LNPMCSXYLPML27WSTT5YA5IUNU', @@ -52,52 +50,35 @@ const blocksFixtures = [ 'CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y'] ] -function maybeWithSharding (filestore, options) { - if (options.sharding === false) { - return filestore - } - - const shard = new core.shard.NextToLast(2) - - return ShardingStore.createOrOpen(filestore, shard) -} - -async function bootstrapBlocks (dir, 
encoded, options) { - const baseStore = new Datastore(`${dir}/blocks`, { extension: '.data', createIfMissing: true }) - await baseStore.open() - const store = await maybeWithSharding(baseStore, options) +async function bootstrapBlocks (dir, encoded, repoOptions) { + const store = await createStore(dir, 'blocks', repoOptions) await store.open() - let name for (const blocksNames of blocksFixtures) { - name = encoded ? blocksNames[1] : blocksNames[0] + const name = encoded ? blocksNames[1] : blocksNames[0] await store.put(new Key(name), '') } await store.close() - await baseStore.open() } -async function validateBlocks (dir, shouldBeEncoded, options) { - const baseStore = new Datastore(`${dir}/blocks`, { extension: '.data', createIfMissing: false }) - await baseStore.open() - const store = await maybeWithSharding(baseStore, options) +async function validateBlocks (dir, encoded, repoOptions) { + const store = await createStore(dir, 'blocks', repoOptions) await store.open() - let newName, oldName for (const blockNames of blocksFixtures) { - newName = shouldBeEncoded ? blockNames[1] : blockNames[0] - oldName = shouldBeEncoded ? blockNames[0] : blockNames[1] + const newName = encoded ? blockNames[1] : blockNames[0] + const oldName = encoded ? blockNames[0] : blockNames[1] expect(await store.has(new Key(`/${oldName}`))).to.be.false(`${oldName} was not migrated to ${newName}`) expect(await store.has(new Key(`/${newName}`))).to.be.true(`${newName} was not removed`) } await store.close() - await baseStore.close() } -module.exports = (setup, cleanup, options) => { - describe('migration 8', () => { +module.exports = (setup, cleanup, repoOptions) => { + describe('migration 8', function () { + this.timeout(240 * 1000) let dir beforeEach(async () => { @@ -109,15 +90,15 @@ module.exports = (setup, cleanup, options) => { }) it('should migrate blocks forward', async () => { - await bootstrapBlocks(dir, false, options.storageBackendOptions.blocks) - await migration.migrate(dir, options) - await validateBlocks(dir, true, options.storageBackendOptions.blocks) + await bootstrapBlocks(dir, false, repoOptions) + await migration.migrate(dir, repoOptions, () => {}) + await validateBlocks(dir, true, repoOptions) }) it('should migrate blocks backward', async () => { - await bootstrapBlocks(dir, true, options.storageBackendOptions.blocks) - await migration.revert(dir, options) - await validateBlocks(dir, false, options.storageBackendOptions.blocks) + await bootstrapBlocks(dir, true, repoOptions) + await migration.revert(dir, repoOptions, () => {}) + await validateBlocks(dir, false, repoOptions) }) }) } diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index f3f493d..b20fd70 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -5,7 +5,8 @@ const { expect } = require('aegir/utils/chai') const cbor = require('cbor') const migration = require('../../migrations/migration-9') -const { createStore, cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils') +const { cidToKey, PIN_DS_KEY } = require('../../migrations/migration-9/utils') +const { createStore } = require('../../src/utils') const CID = require('cids') const CarDatastore = require('datastore-car') const loadFixture = require('aegir/fixtures') @@ -78,19 +79,30 @@ async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expe const [actualRoot] = await car.getRoots() expect(actualRoot.toString()).to.equal(expectedRoot.toString()) + await blockstore.open() for 
await (const { key, value } of car.query()) { await blockstore.put(cidToKey(new CID(key.toString())), value) } await blockstore.close() + + await datastore.open() await datastore.put(PIN_DS_KEY, actualRoot.multihash) await datastore.close() } -module.exports = (setup, cleanup, options) => { +async function assertPinsetRootIsPresent (datastore, pinset) { + await datastore.open() + const buf = await datastore.get(PIN_DS_KEY) + await datastore.close() + const cid = new CID(buf) + expect(cid.toString()).to.equal(pinset.root.toString()) +} + +module.exports = (setup, cleanup, repoOptions) => { describe('migration 9', function () { - this.timeout(240 * 1000) + this.timeout(480 * 1000) let dir let datastore @@ -100,17 +112,13 @@ module.exports = (setup, cleanup, options) => { beforeEach(async () => { dir = await setup() - blockstore = await createStore(dir, 'blocks', options) - datastore = await createStore(dir, 'datastore', options) - pinstore = await createStore(dir, 'pins', options) + blockstore = await createStore(dir, 'blocks', repoOptions) + datastore = await createStore(dir, 'datastore', repoOptions) + pinstore = await createStore(dir, 'pins', repoOptions) }) afterEach(async () => { - await pinstore.close() - await datastore.close() - await blockstore.close() - - cleanup(dir) + await cleanup(dir) }) Object.keys(pinsets).forEach(title => { @@ -120,23 +128,13 @@ module.exports = (setup, cleanup, options) => { describe(title, () => { describe('forwards', () => { beforeEach(async () => { - await blockstore.open() await bootstrapBlocks(blockstore, datastore, pinset) - - await datastore.open() - await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() - - const buf = await datastore.get(PIN_DS_KEY) - const cid = new CID(buf) - expect(cid.toString()).to.equal(pinset.root.toString()) - - await blockstore.close() - await datastore.close() - await pinstore.close() + await assertPinsetRootIsPresent(datastore, pinset) }) it('should migrate pins forward', async () => { - await migration.migrate(dir, options) + await migration.migrate(dir, repoOptions, () => {}) + await pinstore.open() let migratedDirect = 0 let migratedNonDagPBRecursive = 0 @@ -160,12 +158,15 @@ module.exports = (setup, cleanup, options) => { } } + await pinstore.close() + expect(migratedDirect).to.equal(directPins.length + nonDagPbDirectPins.length) expect(migratedNonDagPBRecursive).to.equal(nonDagPbRecursivePins.length) expect(Object.keys(pinned)).to.have.lengthOf(pinset.pins) await datastore.open() await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false() + await datastore.close() }) }) @@ -181,14 +182,9 @@ module.exports = (setup, cleanup, options) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir, options) - - await datastore.open() - await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() + await migration.revert(dir, repoOptions, () => {}) - const buf = await datastore.get(PIN_DS_KEY) - const cid = new CID(buf) - expect(cid).to.deep.equal(pinset.root) + await assertPinsetRootIsPresent(datastore, pinset) }) }) }) diff --git a/test/node.js b/test/node.js index 7d1f9d6..807dbb6 100644 --- a/test/node.js +++ b/test/node.js @@ -23,6 +23,8 @@ const repoOptions = { extension: '.data' }, keys: { + }, + pins: { } } } diff --git a/test/test-migrations/migration-2/index.js b/test/test-migrations/migration-2/index.js index 4d8aa0b..982f024 100644 --- a/test/test-migrations/migration-2/index.js +++ b/test/test-migrations/migration-2/index.js @@ -1,9 +1,9 @@ 'use strict' -const 
Datastore = require('datastore-fs') const Key = require('interface-datastore').Key const _set = require('just-safe-set') const uint8ArrayFromString = require('uint8arrays/from-string') +const { createStore } = require('../../../src/utils') const CONFIG_KEY = new Key('config') const NEW_API_ADDRESS = '/ip6/::/tcp/5001' @@ -17,29 +17,6 @@ const NEW_API_ADDRESS = '/ip6/::/tcp/5001' * 2) Changes 'Gateway.HTTPHeaders.Access-Control-Allow-Origin' to specific origin */ -function datastoreFactory (repoPath, options) { - let StorageBackend, storageBackendOptions - if (options !== undefined && - options.storageBackends !== undefined && - options.storageBackends.root !== undefined - ) { - StorageBackend = options.storageBackends.root - } else { - StorageBackend = Datastore - } - - if (options !== undefined && - options.storageBackendOptions !== undefined && - options.storageBackendOptions.root !== undefined - ) { - storageBackendOptions = options.storageBackendOptions.root - } else { - storageBackendOptions = { extension: '' } - } - - return new StorageBackend(repoPath, storageBackendOptions) -} - function addNewApiAddress (config) { let apiAddrs = config.Addresses.API @@ -76,9 +53,10 @@ function removeNewApiAddress (config) { return config } -async function migrate (repoPath, options, isBrowser) { - const store = datastoreFactory(repoPath, options) +async function migrate (repoPath, repoOptions, onProgress) { + const store = await createStore(repoPath, 'root', repoOptions) await store.open() + try { const rawConfig = await store.get(CONFIG_KEY) let config = JSON.parse(rawConfig.toString()) @@ -94,11 +72,14 @@ async function migrate (repoPath, options, isBrowser) { } finally { await store.close() } + + onProgress(100, 'done!') } -async function revert (repoPath, options, isBrowser) { - const store = datastoreFactory(repoPath, options) +async function revert (repoPath, repoOptions, onProgress) { + const store = await createStore(repoPath, 'root', repoOptions) await store.open() + try { const rawConfig = await store.get(CONFIG_KEY) let config = JSON.parse(rawConfig.toString()) @@ -114,6 +95,8 @@ async function revert (repoPath, options, isBrowser) { } finally { await store.close() } + + onProgress(100, 'done!') } module.exports = { diff --git a/test/util.js b/test/util.js deleted file mode 100644 index 68cfd2a..0000000 --- a/test/util.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -const chai = require('chai') -const expect = chai.expect -const sinon = require('sinon') -chai.use(require('chai-as-promised')) -chai.use(require('dirty-chai')) - -module.exports = { - expect, - sinon -} diff --git a/test/version-test.js b/test/version-test.js index 264ecba..1b4e894 100644 --- a/test/version-test.js +++ b/test/version-test.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const { expect } = require('./util') -const { VERSION_KEY, CONFIG_KEY, getDatastoreAndOptions } = require('../src/utils') +const { expect } = require('aegir/utils/chai') +const { VERSION_KEY, CONFIG_KEY, createStore } = require('../src/utils') const version = require('../src/repo/version') const uint8ArrayFromString = require('uint8arrays/from-string') const errors = require('../src/errors') @@ -24,19 +24,10 @@ module.exports = (setup, cleanup, repoOptions) => { .with.property('code', errors.NotInitializedRepoError.code) }) - describe('version 7 and bellow', () => { + describe('version 7 and below', () => { it('should get version number', async () => { // Create version file - const { - StorageBackend, - storageOptions - } = 
getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: false - }) - + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config')) await store.put(VERSION_KEY, uint8ArrayFromString('7')) @@ -49,16 +40,7 @@ module.exports = (setup, cleanup, repoOptions) => { await expect(version.getVersion(dir, repoOptions)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError).with.property('code', errors.NotInitializedRepoError.code) // Create version file - const { - StorageBackend, - storageOptions - } = getDatastoreAndOptions(repoOptions, 'root') - - const store = new StorageBackend(dir, { - ...storageOptions, - createIfMissing: false - }) - + const store = await createStore(dir, 'root', repoOptions) await store.open() await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config')) await store.put(VERSION_KEY, uint8ArrayFromString('5'))
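The in-tree migrations and the test migrations above now all receive `onProgress` as their third argument. For completeness, a condensed sketch of the pattern migration-8 and migration-9 follow to drive it: count the work up front with `it-length` (only when a callback was supplied), then report a percentage per item. The store name, require path and message text are illustrative assumptions, not code from this patch.

```js
'use strict'

const length = require('it-length')
const { createStore } = require('../../src/utils')

async function migrate (repoPath, repoOptions, onProgress) {
  const store = await createStore(repoPath, 'blocks', repoOptions)
  await store.open()

  let total

  if (onProgress) {
    // Counting everything first is what makes migrations "a bit slower" when
    // a progress callback is passed, as noted in the commit message above.
    total = await length(store.query({ keysOnly: true }))
  }

  try {
    let counter = 0

    for await (const block of store.query({})) {
      counter++

      // ... transform block.key / block.value here ...

      if (onProgress) {
        onProgress((counter / total) * 100, `Processed ${block.key}`)
      }
    }
  } finally {
    await store.close()
  }
}
```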