diff --git a/.aegir.js b/.aegir.js
index f176614..4d0b129 100644
--- a/.aegir.js
+++ b/.aegir.js
@@ -4,7 +4,10 @@ module.exports = {
   webpack: {
     node: {
       // this is needed until level stops using node buffers in browser code
-      Buffer: true
+      Buffer: true,
+
+      // needed by cbor, binary-parse-stream and nofilter
+      stream: true
     }
   }
 }
diff --git a/README.md b/README.md
index 78a2a15..edee377 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Migration tool for JS IPFS Repo
+# Migration tool for JS IPFS Repo
 
 [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations)
 
@@ -15,30 +15,36 @@ This package is inspired by the [go-ipfs repo migration tool](https://github.com/ipfs/fs-repo-migrations/)
 
-## Lead Maintainer
+## Lead Maintainer
 
-[Adam Uhlíř](https://github.com/auhau/)
+[Alex Potsides](http://github.com/achingbrain)
 
-## Table of Contents
+## Table of Contents
 
 - [Background](#background)
 - [Install](#install)
   - [npm](#npm)
   - [Use in Node.js](#use-in-nodejs)
   - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler)
-  - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag)
 - [Usage](#usage)
 - [API](#api)
+  - [`.migrate(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise<void>`](#migratepath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid)
+    - [`onProgress(migration, counter, totalMigrations)`](#onprogressmigration-counter-totalmigrations)
+  - [`.revert(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise<void>`](#revertpath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid)
+  - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int)
 - [CLI](#cli)
 - [Creating a new migration](#creating-a-new-migration)
   - [Architecture of a migration](#architecture-of-a-migration)
+    - [`.migrate(repoPath, repoOptions)`](#migraterepopath-repooptions)
+    - [`.revert(repoPath, repoOptions)`](#revertrepopath-repooptions)
   - [Browser vs. NodeJS environments](#browser-vs-nodejs-environments)
   - [Guidelines](#guidelines)
     - [Integration with js-ipfs](#integration-with-js-ipfs)
+    - [Tests](#tests)
    - [Empty migrations](#empty-migrations)
 - [Migrations matrix](#migrations-matrix)
 - [Developer](#developer)
-  - [Module versioning notes](#module-versioning-notes)
+  - [Module versioning notes](#module-versioning-notes)
 - [Contribute](#contribute)
 - [License](#license)
@@ -266,7 +272,9 @@
 This will create an empty migration with the next version.
 
 | IPFS repo version | JS IPFS version |
 | -----------------: |:----------------:|
-| 7 | v0.0.0 - latest |
+| 7 | v0.0.0 |
+| 8 | v0.48.0 |
+| 9 | v0.49.0 |
 
 ## Developer
diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js
new file mode 100644
index 0000000..e9d1785
--- /dev/null
+++ b/migrations/migration-9/index.js
@@ -0,0 +1,107 @@
+'use strict'
+
+const CID = require('cids')
+const dagpb = require('ipld-dag-pb')
+const cbor = require('cbor')
+const multicodec = require('multicodec')
+const multibase = require('multibase')
+const pinset = require('./pin-set')
+const { createStore, cidToKey, PIN_DS_KEY, PinTypes } = require('./utils')
+
+async function pinsToDatastore (blockstore, datastore, pinstore) {
+  const mh = await datastore.get(PIN_DS_KEY)
+  const cid = new CID(mh)
+
+  const pinRootBuf = await blockstore.get(cidToKey(cid))
+  const pinRoot = dagpb.util.deserialize(pinRootBuf)
+
+  for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) {
+    const pin = {}
+
+    if (cid.version !== 0) {
+      pin.version = cid.version
+    }
+
+    if (cid.codec !== 'dag-pb') {
+      pin.codec = multicodec.getNumber(cid.codec)
+    }
+
+    await pinstore.put(cidToKey(cid), cbor.encode(pin))
+  }
+
+  for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) {
+    const pin = {
+      depth: 0
+    }
+
+    if (cid.version !== 0) {
+      pin.version = cid.version
+    }
+
+    if (cid.codec !== 'dag-pb') {
+      pin.codec = multicodec.getNumber(cid.codec)
+    }
+
+    await pinstore.put(cidToKey(cid), cbor.encode(pin))
+  }
+
+  await blockstore.delete(cidToKey(cid))
+  await datastore.delete(PIN_DS_KEY)
+}
+
+async function pinsToDAG (blockstore, datastore, pinstore) {
+  let recursivePins = []
+  let directPins = []
+
+  for await (const { key, value } of pinstore.query({})) {
+    const pin = cbor.decode(value)
+    const cid = new CID(pin.version || 0, (pin.codec && multicodec.getName(pin.codec)) || 'dag-pb', multibase.decode('B' + key.toString().split('/').pop()))
+
+    if (pin.depth === 0) {
+      directPins.push(cid)
+    } else {
+      recursivePins.push(cid)
+    }
+  }
+
+  const pinRoot = new dagpb.DAGNode(Buffer.alloc(0), [
+    await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins),
+    await pinset.storeSet(blockstore, PinTypes.direct, directPins)
+  ])
+  const buf = pinRoot.serialize()
+  const cid = await dagpb.util.cid(buf, {
+    cidVersion: 0,
+    hashAlg: multicodec.SHA2_256
+  })
+  await blockstore.put(cidToKey(cid), buf)
+  await datastore.put(PIN_DS_KEY, cid.multihash)
+}
+
+async function process (repoPath, options, fn) {
+  const blockstore = await createStore(repoPath, 'blocks', options)
+  const datastore = await createStore(repoPath, 'datastore', options)
+  const pinstore = await createStore(repoPath, 'pins', options)
+
+  await blockstore.open()
+  await datastore.open()
+  await pinstore.open()
+
+  try {
+    await fn(blockstore, datastore, pinstore)
+  } finally {
+    await pinstore.close()
+    await datastore.close()
+    await blockstore.close()
+  }
+}
+
+module.exports = {
+  version: 9,
+  description: 'Migrates pins to datastore',
+  migrate: (repoPath, options = {}) => {
+    return process(repoPath, options, pinsToDatastore)
+  },
+  revert: (repoPath, options = {}) => {
+    return process(repoPath, options, pinsToDAG)
+  }
+}
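Aside, for reviewers — a small sketch (not part of the patch) of what `pinsToDatastore` writes: one datastore entry per pin, keyed by the base32upper-encoded multihash (via `cidToKey`), with a cbor payload carrying only the non-default fields. The CID below is illustrative.

```js
'use strict'

// Illustration only: the record written for a direct pin of a CIDv0 dag-pb
// block. `version` and `codec` are omitted from the payload because they
// match the defaults (0 and 'dag-pb') checked in pinsToDatastore().
const cbor = require('cbor')
const multibase = require('multibase')
const CID = require('cids')

const cid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr')

// key: '/' + base32upper(multihash), as produced by cidToKey() in ./utils
const key = `/${multibase.encoding('base32upper').encode(cid.multihash)}`

// direct pins store depth: 0; recursive pins store no depth at all
const value = cbor.encode({ depth: 0 })

console.log(key, value)
```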
diff --git a/migrations/migration-9/pin-set.js b/migrations/migration-9/pin-set.js
new file mode 100644
index 0000000..a3f25c3
--- /dev/null
+++ b/migrations/migration-9/pin-set.js
@@ -0,0 +1,198 @@
+'use strict'
+
+const CID = require('cids')
+const protobuf = require('protons')
+const fnv1a = require('fnv1a')
+const varint = require('varint')
+const dagpb = require('ipld-dag-pb')
+const { DAGNode, DAGLink } = dagpb
+const multicodec = require('multicodec')
+const pbSchema = require('./pin.proto')
+const { Buffer } = require('buffer')
+const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils')
+
+const pb = protobuf(pbSchema)
+
+function toB58String (hash) {
+  return new CID(hash).toBaseEncodedString()
+}
+
+function readHeader (rootNode) {
+  // rootNode.Data should be a buffer of the format:
+  // < varint(headerLength) | header | itemData... >
+  const rootData = rootNode.Data
+  const hdrLength = varint.decode(rootData)
+  const vBytes = varint.decode.bytes
+
+  if (vBytes <= 0) {
+    throw new Error('Invalid Set header length')
+  }
+
+  if (vBytes + hdrLength > rootData.length) {
+    throw new Error('Impossibly large set header length')
+  }
+
+  const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes)
+  const header = pb.Set.decode(hdrSlice)
+
+  if (header.version !== 1) {
+    throw new Error(`Unsupported Set version: ${header.version}`)
+  }
+
+  if (header.fanout > rootNode.Links.length) {
+    throw new Error('Impossibly large fanout')
+  }
+
+  return {
+    header: header,
+    data: rootData.slice(hdrLength + vBytes)
+  }
+}
+
+function hash (seed, key) {
+  const buf = Buffer.alloc(4)
+  buf.writeUInt32LE(seed, 0)
+  const data = Buffer.concat([
+    buf, Buffer.from(toB58String(key))
+  ])
+  return fnv1a(data.toString('binary'))
+}
+
+async function * walkItems (blockstore, node) {
+  const pbh = readHeader(node)
+  let idx = 0
+
+  for (const link of node.Links) {
+    if (idx < pbh.header.fanout) {
+      // the first pbh.header.fanout links are fanout bins
+      // if a fanout bin is not 'empty', dig into and walk its DAGLinks
+      const linkHash = link.Hash
+
+      if (!EMPTY_KEY.equals(linkHash.buffer)) {
+        // walk the links of this fanout bin
+        const buf = await blockstore.get(cidToKey(linkHash))
+        const node = dagpb.util.deserialize(buf)
+
+        yield * walkItems(blockstore, node)
+      }
+    } else {
+      // otherwise, the link is a pin
+      yield link.Hash
+    }
+
+    idx++
+  }
+}
+
+async function * loadSet (blockstore, rootNode, name) {
+  const link = rootNode.Links.find(l => l.Name === name)
+
+  if (!link) {
+    throw new Error('No link found with name ' + name)
+  }
+
+  const buf = await blockstore.get(cidToKey(link.Hash))
+  const node = dagpb.util.deserialize(buf)
+
+  yield * walkItems(blockstore, node)
+}
+
+function storeItems (blockstore, items) {
+  return storePins(items, 0)
+
+  async function storePins (pins, depth) {
+    const pbHeader = pb.Set.encode({
+      version: 1,
+      fanout: DEFAULT_FANOUT,
+      seed: depth
+    })
+    const headerBuf = Buffer.concat([
+      Buffer.from(varint.encode(pbHeader.length)), pbHeader
+    ])
+    const fanoutLinks = []
+
+    for (let i = 0; i < DEFAULT_FANOUT; i++) {
+      fanoutLinks.push(new DAGLink('', 1, EMPTY_KEY))
+    }
+
+    if (pins.length <= MAX_ITEMS) {
+      const nodes = pins
+        .map(item => {
+          return ({
+            link: new DAGLink('', 1, item.key),
+            data: item.data || Buffer.alloc(0)
+          })
+        })
+        // sorting makes any ordering of `pins` produce the same DAGNode
+        .sort((a, b) => Buffer.compare(a.link.Hash.buffer, b.link.Hash.buffer))
+
+      const rootLinks = fanoutLinks.concat(nodes.map(item => item.link))
+      const rootData = Buffer.concat(
+        [headerBuf].concat(nodes.map(item => item.data))
+      )
+
+      return new DAGNode(rootData, rootLinks)
+    } else {
+      // If the array of pins is > MAX_ITEMS, we:
+      //  - distribute the pins among `DEFAULT_FANOUT` bins
+      //    - create a DAGNode for each bin
+      //      - add each pin as a DAGLink to that bin
+      //  - create a root DAGNode
+      //    - add each bin as a DAGLink
+      //  - send that root DAGNode via callback
+      // (using go-ipfs' "wasteful but simple" approach for consistency)
+      // https://github.com/ipfs/go-ipfs/blob/master/pin/set.go#L57
+
+      const bins = pins.reduce((bins, pin) => {
+        const n = hash(depth, pin.key) % DEFAULT_FANOUT
+        bins[n] = n in bins ? bins[n].concat([pin]) : [pin]
+        return bins
+      }, [])
+
+      let idx = 0
+      for (const bin of bins) {
+        const child = await storePins(bin, depth + 1)
+
+        await storeChild(child, idx)
+
+        idx++
+      }
+
+      return new DAGNode(headerBuf, fanoutLinks)
+    }
+
+    async function storeChild (child, binIdx) {
+      const buf = dagpb.util.serialize(child)
+      const cid = await dagpb.util.cid(buf, {
+        cidVersion: 0,
+        hashAlg: multicodec.SHA2_256
+      })
+      await blockstore.put(cidToKey(cid), buf)
+
+      fanoutLinks[binIdx] = new DAGLink('', child.size, cid)
+    }
+  }
+}
+
+async function storeSet (blockstore, type, cids) {
+  const rootNode = await storeItems(blockstore, cids.map(cid => {
+    return {
+      key: cid,
+      data: null
+    }
+  }))
+  const buf = rootNode.serialize()
+  const cid = await dagpb.util.cid(buf, {
+    cidVersion: 0,
+    hashAlg: multicodec.SHA2_256
+  })
+
+  await blockstore.put(cidToKey(cid), buf)
+
+  return new DAGLink(type, rootNode.size, cid)
+}
+
+module.exports = {
+  loadSet,
+  storeSet
+}
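Aside (not part of the patch): when a set outgrows `MAX_ITEMS`, `storePins` above spreads pins across `DEFAULT_FANOUT` bins using `hash()`. A self-contained sketch of that bin selection — `binIndex` is a hypothetical helper name, the arithmetic mirrors `hash()` plus the `% DEFAULT_FANOUT` step:

```js
'use strict'

// Illustration only: deterministic bin selection as performed by hash() and
// storePins() above. The fnv1a input is a 4-byte little-endian seed (the
// recursion depth) concatenated with the base58 string of the pin's key.
const { Buffer } = require('buffer')
const fnv1a = require('fnv1a')
const CID = require('cids')

function binIndex (depth, cid, fanout = 256) {
  const seed = Buffer.alloc(4)
  seed.writeUInt32LE(depth, 0)
  const data = Buffer.concat([
    seed,
    Buffer.from(new CID(cid).toBaseEncodedString())
  ])
  return fnv1a(data.toString('binary')) % fanout
}

// e.g. which of the 256 top-level bins this pin would land in
console.log(binIndex(0, 'QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr'))
```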
diff --git a/migrations/migration-9/pin.proto.js b/migrations/migration-9/pin.proto.js
new file mode 100644
index 0000000..8e94fd8
--- /dev/null
+++ b/migrations/migration-9/pin.proto.js
@@ -0,0 +1,19 @@
+'use strict'
+
+/**
+ * Protobuf interface
+ * from go-ipfs/pin/internal/pb/header.proto
+ */
+module.exports = `
+  syntax = "proto2";
+
+  package ipfs.pin;
+
+  option go_package = "pb";
+
+  message Set {
+    optional uint32 version = 1;
+    optional uint32 fanout = 2;
+    optional fixed32 seed = 3;
+  }
+`
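Aside (not part of the patch): the `Set` message above is the header that `readHeader` in pin-set.js peels off a pin-set root node. A minimal decoding sketch, assuming it runs next to `pin.proto.js` — `peekHeader` is a hypothetical name:

```js
'use strict'

// Illustration only: decode the < varint(headerLength) | header | ... >
// prefix of a pin-set root node's Data field, mirroring readHeader().
const varint = require('varint')
const protobuf = require('protons')
const pb = protobuf(require('./pin.proto'))

function peekHeader (rootData) {
  const hdrLength = varint.decode(rootData) // length of the Set header
  const vBytes = varint.decode.bytes        // bytes consumed by the varint
  const header = pb.Set.decode(rootData.slice(vBytes, vBytes + hdrLength))

  return header // e.g. { version: 1, fanout: 256, seed: <depth> }
}

module.exports = peekHeader
```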
diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js
new file mode 100644
index 0000000..893bba8
--- /dev/null
+++ b/migrations/migration-9/utils.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const core = require('datastore-core')
+const ShardingStore = core.ShardingDatastore
+const utils = require('../../src/utils')
+const multibase = require('multibase')
+const { Key } = require('interface-datastore')
+const multihashes = require('multihashing-async').multihash
+
+const PIN_DS_KEY = new Key('/local/pins')
+const DEFAULT_FANOUT = 256
+const MAX_ITEMS = 8192
+const EMPTY_KEY = multihashes.fromB58String('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+
+const PinTypes = {
+  direct: 'direct',
+  recursive: 'recursive'
+}
+
+function cidToKey (cid) {
+  return new Key(`/${multibase.encoding('base32upper').encode(cid.multihash)}`)
+}
+
+// The equivalent function in js-ipfs-repo defaults to not using sharding,
+// but options.sharding defaults to true here, so this function defaults
+// to using sharding.
+async function maybeWithSharding (filestore, options) {
+  if (options.sharding === false) {
+    return filestore
+  }
+
+  const shard = new core.shard.NextToLast(2)
+
+  return ShardingStore.createOrOpen(filestore, shard)
+}
+
+const createStore = async (location, name, options) => {
+  const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, name)
+
+  let store = new StorageBackend(`${location}/${name}`, storageOptions)
+  store = await maybeWithSharding(store, storageOptions)
+
+  return store
+}
+
+module.exports = {
+  PIN_DS_KEY,
+  DEFAULT_FANOUT,
+  MAX_ITEMS,
+  EMPTY_KEY,
+  PinTypes,
+  createStore,
+  cidToKey
+}
diff --git a/package.json b/package.json
index 2e27451..dd3d5d7 100644
--- a/package.json
+++ b/package.json
@@ -45,15 +45,22 @@
   },
   "dependencies": {
     "buffer": "^5.6.0",
+    "cbor": "^5.0.2",
     "chalk": "^4.0.0",
     "cids": "^0.8.3",
     "datastore-core": "^1.1.0",
     "datastore-fs": "^1.0.0",
     "datastore-level": "^1.1.0",
     "debug": "^4.1.0",
+    "fnv1a": "^1.0.1",
     "interface-datastore": "^1.0.2",
+    "ipld-dag-pb": "^0.18.5",
     "multibase": "^1.0.1",
+    "multicodec": "^1.0.3",
+    "multihashing-async": "^1.0.0",
     "proper-lockfile": "^4.1.1",
+    "protons": "^1.2.1",
+    "varint": "^5.0.0",
     "yargs": "^15.3.1",
     "yargs-promise": "^1.1.0"
   },
@@ -62,6 +69,7 @@
     "chai": "^4.2.0",
     "chai-as-promised": "^7.1.1",
     "dirty-chai": "^2.0.1",
+    "it-all": "^1.0.2",
     "just-safe-set": "^2.1.0",
     "ncp": "^2.0.0",
     "rimraf": "^3.0.0",
diff --git a/test/browser.js b/test/browser.js
index a8f55dd..5ea35cd 100644
--- a/test/browser.js
+++ b/test/browser.js
@@ -44,8 +44,8 @@ describe('Browser specific tests', () => {
     require('./version-test')(createRepo, repoCleanup)
   })
 
-  describe('migrations tests', () => {
-    require('./migrations/migration-8-test')(createRepo, repoCleanup)
+  describe('migration tests', () => {
+    require('./migrations')(createRepo, repoCleanup)
   })
 
   describe('init tests', () => {
diff --git a/test/migrations/index.js b/test/migrations/index.js
new file mode 100644
index 0000000..895d70c
--- /dev/null
+++ b/test/migrations/index.js
@@ -0,0 +1,65 @@
+/* eslint-env mocha */
+'use strict'
+
+const CONFIGURATIONS = [{
+  name: 'with sharding',
+  options: {
+    storageBackendOptions: {
+      root: {
+        sharding: true,
+        extension: '.data'
+      },
+      blocks: {
+        sharding: true,
+        extension: '.data'
+      },
+      datastore: {
+        sharding: true,
+        extension: '.data'
+      },
+      keys: {
+        sharding: true,
+        extension: '.data'
+      },
+      pins: {
+        sharding: true,
+        extension: '.data'
+      }
+    }
+  }
+}, {
+  name: 'without sharding',
+  options: {
+    storageBackendOptions: {
+      root: {
+        sharding: false,
+        extension: '.data'
+      },
+      blocks: {
+        sharding: false,
+        extension: '.data'
+      },
+      datastore: {
+        sharding: false,
+        extension: '.data'
+      },
+      keys: {
+        sharding: false,
+        extension: '.data'
+      },
+      pins: {
+        sharding: false,
+        extension: '.data'
+      }
+    }
+  }
+}]
+
+module.exports = (createRepo, repoCleanup) => {
+  CONFIGURATIONS.forEach(({ name, options }) => {
+    describe(name, () => {
+      require('./migration-8-test')(createRepo, repoCleanup, options)
+      require('./migration-9-test')(createRepo, repoCleanup, options)
+    })
+  })
+}
diff --git a/test/migrations/migration-8-test.js b/test/migrations/migration-8-test.js
index 9f7cf42..540dd1b 100644
--- a/test/migrations/migration-8-test.js
+++ b/test/migrations/migration-8-test.js
@@ -100,49 +100,28 @@ async function validateBlocks (dir, shouldBeEncoded, options) {
   await baseStore.close()
 }
 
-const CONFIGURATIONS = [{
-  name: 'with block sharding',
-  options: {
-    storageBackendOptions: {
-      blocks: {
-        sharding: true,
-        extension: '.data'
-      }
-    }
-  }
-}, {
-  name: 'without block sharding',
-  options: {
-    storageBackendOptions: {
-      blocks: {
-        sharding: false,
-        extension: '.data'
-      }
-    }
-  }
-}]
-
-module.exports = (setup, cleanup) => {
+module.exports = (setup, cleanup, options) => {
   describe('migration 8', () => {
     let dir
 
     beforeEach(async () => {
      dir = await setup()
     })
-    afterEach(() => cleanup(dir))
-
-    CONFIGURATIONS.forEach(({ name, options }) => {
-      it(`should migrate blocks forward ${name}`, async () => {
-        await bootstrapBlocks(dir, false, options.storageBackendOptions.blocks)
-        await migration.migrate(dir, options)
-        await validateBlocks(dir, true, options.storageBackendOptions.blocks)
-      })
-
-      it(`should migrate blocks backward ${name}`, async () => {
-        await bootstrapBlocks(dir, true, options.storageBackendOptions.blocks)
-        await migration.revert(dir, options)
-        await validateBlocks(dir, false, options.storageBackendOptions.blocks)
-      })
+
+    afterEach(async () => {
+      await cleanup(dir)
+    })
+
+    it('should migrate blocks forward', async () => {
+      await bootstrapBlocks(dir, false, options.storageBackendOptions.blocks)
+      await migration.migrate(dir, options)
+      await validateBlocks(dir, true, options.storageBackendOptions.blocks)
+    })
+
+    it('should migrate blocks backward', async () => {
+      await bootstrapBlocks(dir, true, options.storageBackendOptions.blocks)
+      await migration.revert(dir, options)
+      await validateBlocks(dir, false, options.storageBackendOptions.blocks)
     })
   })
 }
diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js
new file mode 100644
index 0000000..9a60b36
--- /dev/null
+++ b/test/migrations/migration-9-test.js
@@ -0,0 +1,153 @@
+/* eslint-env mocha */
+'use strict'
+
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const chaiAsPromised = require('chai-as-promised')
+chai.use(chaiAsPromised)
+const expect = chai.expect
+const dagpb = require('ipld-dag-pb')
+const { DAGNode, DAGLink } = dagpb
+const multicodec = require('multicodec')
+const multibase = require('multibase')
+const all = require('it-all')
+const cbor = require('cbor')
+
+const migration = require('../../migrations/migration-9')
+const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT } = require('../../migrations/migration-9/utils')
+const CID = require('cids')
+
+function keyToCid (key) {
+  const buf = Buffer.from(multibase.encoding('base32upper').decode(key.toString().split('/').pop()))
+  return new CID(buf)
+}
+
+const pinnedCid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr')
+const pinRootCid = new CID('QmZ9ANfh6BMFoeinQU1WQ2BQrRea4UusEikQ1kupx3HtsY')
+
+// creates the pinset with the 'welcome to IPFS' files you get when you `jsipfs init`
+async function bootstrapBlocks (blockstore, datastore) {
+  async function putNode (node, expectedCid) {
+    const buf = node.serialize()
+    const cid = await dagpb.util.cid(buf, {
+      cidVersion: 0,
+      hashAlg: multicodec.SHA2_256
+    })
+    expect(cid.toString()).to.equal(expectedCid)
+    await blockstore.put(cidToKey(cid), buf)
+
+    return node.toDAGLink({
+      name: '',
+      cidVersion: 0,
+      hashAlg: multicodec.SHA2_256
+    })
+  }
+
+  const emptyBlock = await putNode(
+    new DAGNode(),
+    'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'
+  )
+  const bucket = new Array(DEFAULT_FANOUT).fill(0).map(() => new DAGLink('', 1, emptyBlock.Hash))
+  const directLinks = await putNode(
+    new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), bucket),
+    'QmbxHkprr5qdLSK8EZWdBzKFzNXGoKrxb7A4PHX3eH6JPp'
+  )
+  const recursiveLinks = await putNode(
+    new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), [
+      ...bucket,
+      new DAGLink('', 1, pinnedCid)
+    ]),
+    'QmdEtks1KYQsrgJ8FXpP1vXygnVHSqnyFTKQ3wcWVd4D2y'
+  )
+
+  const pinRoot = await putNode(
+    new DAGNode(Buffer.alloc(0), [
+      new DAGLink('direct', directLinks.Tsize, directLinks.Hash),
+      new DAGLink('recursive', recursiveLinks.Tsize, recursiveLinks.Hash)
+    ]),
+    pinRootCid.toString()
+  )
+
+  await blockstore.close()
+  await datastore.put(PIN_DS_KEY, pinRoot.Hash.multihash)
+  await datastore.close()
+}
+
+module.exports = (setup, cleanup, options) => {
+  describe('migration 9', () => {
+    let dir
+    let datastore
+    let blockstore
+    let pinstore
+
+    beforeEach(async () => {
+      dir = await setup()
+
+      blockstore = await createStore(dir, 'blocks', options)
+      datastore = await createStore(dir, 'datastore', options)
+      pinstore = await createStore(dir, 'pins', options)
+    })
+
+    afterEach(async () => {
+      await pinstore.close()
+      await datastore.close()
+      await blockstore.close()
+
+      await cleanup(dir)
+    })
+
+    describe('forwards', () => {
+      beforeEach(async () => {
+        await blockstore.open()
+        await bootstrapBlocks(blockstore, datastore)
+
+        await datastore.open()
+        await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true()
+
+        await blockstore.close()
+        await datastore.close()
+      })
+
+      it('should migrate pins forward', async () => {
+        await migration.migrate(dir, options)
+
+        await pinstore.open()
+
+        const pins = await all(pinstore.query({}))
+        expect(pins).to.have.lengthOf(1)
+
+        const key = pins[0].key
+        expect(keyToCid(key).toString()).to.equal(pinnedCid.toString())
+
+        const pin = cbor.decode(pins[0].value)
+        expect(pin.depth).to.be.undefined()
+
+        await datastore.open()
+        await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false()
+      })
+    })
+
+    describe('backwards', () => {
+      beforeEach(async () => {
+        await pinstore.open()
+        await pinstore.put(cidToKey(pinnedCid), cbor.encode({
+          metadata: {
+            foo: 'bar'
+          }
+        }))
+        await pinstore.close()
+      })
+
+      it('should migrate pins backward', async () => {
+        await migration.revert(dir, options)
+
+        await datastore.open()
+        await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true()
+
+        const buf = await datastore.get(PIN_DS_KEY)
+        const cid = new CID(buf)
+        expect(cid.toString()).to.equal(pinRootCid.toString())
+      })
+    })
+  })
+}
diff --git a/test/node.js b/test/node.js
index 584275d..f515c96 100644
--- a/test/node.js
+++ b/test/node.js
@@ -43,8 +43,8 @@ describe('Node specific tests', () => {
     require('./version-test')(createRepo, repoCleanup)
   })
 
-  describe('migrations tests', () => {
-    require('./migrations/migration-8-test')(createRepo, repoCleanup)
+  describe('migration tests', () => {
+    require('./migrations')(createRepo, repoCleanup)
   })
 
   describe('init tests', () => {
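Finally, a minimal usage sketch (not part of the patch) of driving this migration directly; `./repo` is a hypothetical repo path and the empty options object means the default storage backends, matching the `module.exports` of `migrations/migration-9/index.js` above:

```js
'use strict'

// Illustration only: migration-9 exports plain async migrate/revert
// functions, so it can be exercised standalone.
const migration = require('./migrations/migration-9')

async function main () {
  // forward: walk the DAG pinset rooted at /local/pins and write one
  // cbor record per pin into the new 'pins' datastore
  await migration.migrate('./repo', {})

  // backward: rebuild the DAG pinset from the 'pins' datastore and
  // restore the /local/pins root key
  await migration.revert('./repo', {})
}

main().catch(console.error)
```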