From d7a88b2b61ac78f49c582243edc236f98e58a41a Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 1 Jul 2020 19:18:45 +0100 Subject: [PATCH 1/8] feat: add migration 9 to migrate pins to the datastore and back Stores pins in the datastore for greatly increased speed of access. Borrows the pin-set code from js-ipfs to perform the reverse migration. --- migrations/migration-9/index.js | 91 ++++++++++++ migrations/migration-9/pin-set.js | 205 ++++++++++++++++++++++++++++ migrations/migration-9/pin.proto.js | 19 +++ migrations/migration-9/utils.js | 55 ++++++++ package.json | 8 ++ test/migrations/migration-9-test.js | 140 +++++++++++++++++++ test/node.js | 1 + 7 files changed, 519 insertions(+) create mode 100644 migrations/migration-9/index.js create mode 100644 migrations/migration-9/pin-set.js create mode 100644 migrations/migration-9/pin.proto.js create mode 100644 migrations/migration-9/utils.js create mode 100644 test/migrations/migration-9-test.js diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js new file mode 100644 index 0000000..9edecc0 --- /dev/null +++ b/migrations/migration-9/index.js @@ -0,0 +1,91 @@ +'use strict' + +const CID = require('cids') +const dagpb = require('ipld-dag-pb') +const cbor = require('cbor') +const multicodec = require('multicodec') +const pinset = require('./pin-set') +const { createStore, cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') + +async function pinsToDatastore (blockstore, datastore, pinstore) { + const mh = await datastore.get(PIN_DS_KEY) + const cid = new CID(mh) + + const pinRootBuf = await blockstore.get(cidToKey(cid)) + const pinRoot = dagpb.util.deserialize(pinRootBuf) + + for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { + await pinstore.put(cidToKey(cid), cbor.encode({ + cid: cid.buffer, + type: PinTypes.recursive + })) + } + + for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) { + await pinstore.put(cidToKey(cid), cbor.encode({ + cid: cid.buffer, + type: PinTypes.direct + })) + } + + await blockstore.delete(cidToKey(cid)) + await datastore.delete(PIN_DS_KEY) +} + +async function pinsToDAG (blockstore, datastore, pinstore) { + let recursivePins = [] + let directPins = [] + + for await (const { value } of pinstore.query({})) { + const pin = cbor.decode(value) + + if (pin.type === PinTypes.recursive) { + recursivePins.push(new CID(pin.cid)) + } + + if (pin.type === PinTypes.direct) { + directPins.push(new CID(pin.cid)) + } + } + + const pinRoot = new dagpb.DAGNode(Buffer.alloc(0), [ + await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins), + await pinset.storeSet(blockstore, PinTypes.direct, directPins) + ]) + const buf = pinRoot.serialize() + const cid = await dagpb.util.cid(buf, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256, + }) + await blockstore.put(cidToKey(cid), buf) + await datastore.put(PIN_DS_KEY, cid.multihash) +} + +async function process (repoPath, options, fn) { + const blockstore = await createStore(repoPath, 'blocks', options) + const datastore = await createStore(repoPath, 'datastore', options) + const pinstore = await createStore(repoPath, 'pins', options) + + await blockstore.open() + await datastore.open() + await pinstore.open() + + try { + await fn(blockstore, datastore, pinstore) + } finally { + await pinstore.close() + await datastore.close() + await blockstore.close() + } +} + +module.exports = { + version: 9, + description: 'Migrates pins to datastore', + migrate: (repoPath, options = {}) => { + return 
process(repoPath, options, pinsToDatastore) + }, + revert: (repoPath, options = {}) => { + return process(repoPath, options, pinsToDAG) + } +} diff --git a/migrations/migration-9/pin-set.js b/migrations/migration-9/pin-set.js new file mode 100644 index 0000000..a3f25c3 --- /dev/null +++ b/migrations/migration-9/pin-set.js @@ -0,0 +1,205 @@ +'use strict' + +const CID = require('cids') +const protobuf = require('protons') +const fnv1a = require('fnv1a') +const varint = require('varint') +const dagpb = require('ipld-dag-pb') +const { DAGNode, DAGLink } = dagpb +const multicodec = require('multicodec') +const pbSchema = require('./pin.proto') +const { Buffer } = require('buffer') +const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') + +const pb = protobuf(pbSchema) + +function toB58String (hash) { + return new CID(hash).toBaseEncodedString() +} + +function readHeader (rootNode) { + // rootNode.data should be a buffer of the format: + // < varint(headerLength) | header | itemData... > + const rootData = rootNode.Data + const hdrLength = varint.decode(rootData) + const vBytes = varint.decode.bytes + + if (vBytes <= 0) { + throw new Error('Invalid Set header length') + } + + if (vBytes + hdrLength > rootData.length) { + throw new Error('Impossibly large set header length') + } + + const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes) + const header = pb.Set.decode(hdrSlice) + + if (header.version !== 1) { + throw new Error(`Unsupported Set version: ${header.version}`) + } + + if (header.fanout > rootNode.Links.length) { + throw new Error('Impossibly large fanout') + } + + return { + header: header, + data: rootData.slice(hdrLength + vBytes) + } +} + +function hash (seed, key) { + const buf = Buffer.alloc(4) + buf.writeUInt32LE(seed, 0) + const data = Buffer.concat([ + buf, Buffer.from(toB58String(key)) + ]) + return fnv1a(data.toString('binary')) +} + +async function * walkItems (blockstore, node) { + const pbh = readHeader(node) + let idx = 0 + + for (const link of node.Links) { + if (idx < pbh.header.fanout) { + // the first pbh.header.fanout links are fanout bins + // if a fanout bin is not 'empty', dig into and walk its DAGLinks + const linkHash = link.Hash + + if (!EMPTY_KEY.equals(linkHash.buffer)) { + // walk the links of this fanout bin + const buf = await blockstore.get(cidToKey(linkHash)) + const node = dagpb.util.deserialize(buf) + + yield * walkItems(blockstore, node) + } + } else { + // otherwise, the link is a pin + yield link.Hash + } + + idx++ + } +} + +async function * loadSet (blockstore, rootNode, name) { + const link = rootNode.Links.find(l => l.Name === name) + + if (!link) { + throw new Error('No link found with name ' + name) + } + + const buf = await blockstore.get(cidToKey(link.Hash)) + const node = dagpb.util.deserialize(buf) + + yield * walkItems(blockstore, node) +} + +function storeItems (blockstore, items) { + return storePins(items, 0) + + async function storePins (pins, depth) { + const pbHeader = pb.Set.encode({ + version: 1, + fanout: DEFAULT_FANOUT, + seed: depth + }) + const headerBuf = Buffer.concat([ + Buffer.from(varint.encode(pbHeader.length)), pbHeader + ]) + const fanoutLinks = [] + + for (let i = 0; i < DEFAULT_FANOUT; i++) { + fanoutLinks.push(new DAGLink('', 1, EMPTY_KEY)) + } + + if (pins.length <= MAX_ITEMS) { + const nodes = pins + .map(item => { + return ({ + link: new DAGLink('', 1, item.key), + data: item.data || Buffer.alloc(0) + }) + }) + // sorting makes any ordering of `pins` produce the same DAGNode + .sort((a, 
b) => Buffer.compare(a.link.Hash.buffer, b.link.Hash.buffer))
+
+      const rootLinks = fanoutLinks.concat(nodes.map(item => item.link))
+      const rootData = Buffer.concat(
+        [headerBuf].concat(nodes.map(item => item.data))
+      )
+
+      return new DAGNode(rootData, rootLinks)
+    } else {
+      // If the array of pins is > MAX_ITEMS, we:
+      //  - distribute the pins among `DEFAULT_FANOUT` bins
+      //    - create a DAGNode for each bin
+      //      - add each pin as a DAGLink to that bin
+      //  - create a root DAGNode
+      //    - add each bin as a DAGLink
+      //  - return that root DAGNode
+      // (using go-ipfs' "wasteful but simple" approach for consistency)
+      // https://github.com/ipfs/go-ipfs/blob/master/pin/set.go#L57
+
+      const bins = pins.reduce((bins, pin) => {
+        const n = hash(depth, pin.key) % DEFAULT_FANOUT
+        bins[n] = n in bins ? bins[n].concat([pin]) : [pin]
+        return bins
+      }, [])
+
+      let idx = 0
+      for (const bin of bins) {
+        const child = await storePins(bin, depth + 1)
+
+        await storeChild(child, idx)
+
+        idx++
+      }
+
+      return new DAGNode(headerBuf, fanoutLinks)
+    }
+
+    async function storeChild (child, binIdx) {
+      const buf = dagpb.util.serialize(child)
+      const cid = await dagpb.util.cid(buf, {
+        cidVersion: 0,
+        hashAlg: multicodec.SHA2_256
+      })
+      await blockstore.put(cidToKey(cid), buf)
+
+      fanoutLinks[binIdx] = new DAGLink('', child.size, cid)
+    }
+  }
+}
+
+async function storeSet (blockstore, type, cids) {
+  const rootNode = await storeItems(blockstore, cids.map(cid => {
+    return {
+      key: cid,
+      data: null
+    }
+  }))
+  const buf = rootNode.serialize()
+  const cid = await dagpb.util.cid(buf, {
+    cidVersion: 0,
+    hashAlg: multicodec.SHA2_256
+  })
+
+  await blockstore.put(cidToKey(cid), buf)
+
+  return new DAGLink(type, rootNode.size, cid)
+}
+
+module.exports = {
+  loadSet,
+  storeSet
+}
diff --git a/migrations/migration-9/pin.proto.js b/migrations/migration-9/pin.proto.js
new file mode 100644
index 0000000..8e94fd8
--- /dev/null
+++ b/migrations/migration-9/pin.proto.js
@@ -0,0 +1,19 @@
+'use strict'
+
+/**
+ * Protobuf interface
+ * from go-ipfs/pin/internal/pb/header.proto
+ */
+module.exports = `
+  syntax = "proto2";
+
+  package ipfs.pin;
+
+  option go_package = "pb";
+
+  message Set {
+    optional uint32 version = 1;
+    optional uint32 fanout = 2;
+    optional fixed32 seed = 3;
+  }
+`
diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js
new file mode 100644
index 0000000..22b624f
--- /dev/null
+++ b/migrations/migration-9/utils.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const path = require('path')
+const core = require('datastore-core')
+const ShardingStore = core.ShardingDatastore
+const utils = require('../../src/utils')
+const multibase = require('multibase')
+const { Key } = require('interface-datastore')
+const multihashes = require('multihashing-async').multihash
+
+const PIN_DS_KEY = new Key('/local/pins')
+const DEFAULT_FANOUT = 256
+const MAX_ITEMS = 8192
+const EMPTY_KEY = multihashes.fromB58String('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')
+
+const PinTypes = {
+  direct: 'direct',
+  recursive: 'recursive'
+}
+
+function cidToKey (cid) {
+  return new Key(`/${multibase.encoding('base32upper').encode(cid.multihash)}`)
+}
+
+// The equivalent function in js-ipfs-repo defaults to NOT sharding, but
+// here options.sharding defaults to true - sharding is only skipped when
+// it has been explicitly disabled.
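+//
+// e.g. with the per-store option shape exercised by test/migrations/index.js
+// (a sketch, not an exhaustive reference), { sharding: false, extension: '.data' }
+// leaves the store unwrapped, while anything else wraps it in a
+// ShardingDatastore using a NextToLast(2) shard.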
+async function maybeWithSharding (filestore, options) { + if (options.sharding === false) { + return filestore + } + + const shard = new core.shard.NextToLast(2) + + return ShardingStore.createOrOpen(filestore, shard) +} + +const createStore = async (location, name, options) => { + const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, name) + + let store = new StorageBackend(path.join(location, name), storageOptions) + store = maybeWithSharding(store, storageOptions) + + return store +} + +module.exports = { + PIN_DS_KEY, + DEFAULT_FANOUT, + MAX_ITEMS, + EMPTY_KEY, + PinTypes, + createStore, + cidToKey +} diff --git a/package.json b/package.json index c342db8..2bea742 100644 --- a/package.json +++ b/package.json @@ -45,15 +45,22 @@ }, "dependencies": { "buffer": "^5.6.0", + "cbor": "^5.0.2", "chalk": "^4.0.0", "cids": "^0.8.3", "datastore-core": "^1.1.0", "datastore-fs": "^1.0.0", "datastore-level": "^1.1.0", "debug": "^4.1.0", + "fnv1a": "^1.0.1", "interface-datastore": "^1.0.2", + "ipld-dag-pb": "^0.18.5", "multibase": "^1.0.1", + "multicodec": "^1.0.3", + "multihashing-async": "^1.0.0", "proper-lockfile": "^4.1.1", + "protons": "^1.2.1", + "varint": "^5.0.0", "yargs": "^15.3.1", "yargs-promise": "^1.1.0" }, @@ -62,6 +69,7 @@ "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "dirty-chai": "^2.0.1", + "it-all": "^1.0.2", "just-safe-set": "^2.1.0", "ncp": "^2.0.0", "rimraf": "^3.0.0", diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js new file mode 100644 index 0000000..51dc0f5 --- /dev/null +++ b/test/migrations/migration-9-test.js @@ -0,0 +1,140 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const chaiAsPromised = require('chai-as-promised') +chai.use(chaiAsPromised) +const expect = chai.expect +const dagpb = require('ipld-dag-pb') +const { DAGNode, DAGLink } = dagpb +const multicodec = require('multicodec') +const multibase = require('multibase') +const all = require('it-all') +const cbor = require('cbor') + +const migration = require('../../migrations/migration-9') +const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT, PinTypes } = require('../../migrations/migration-9/utils') +const CID = require('cids') + +function keyToCid (key) { + const buf = Buffer.from(multibase.encoding('base32upper').decode(key.toString().substring(1))) + return new CID(buf) +} + +const pinnedCid = new CID('QmfGBRT6BbWJd7yUc2uYdaUZJBbnEFvTqehPFoSMQ6wgdr') +const pinRootCid = new CID('QmZ9ANfh6BMFoeinQU1WQ2BQrRea4UusEikQ1kupx3HtsY') + +// creates the pinset with the 'welcome to IPFS' files you get when you `jsipfs init` +async function bootstrapBlocks (blockstore, datastore) { + async function putNode (node, expectedCid) { + const buf = node.serialize() + const cid = await dagpb.util.cid(buf, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + expect(cid.toString()).to.equal(expectedCid) + await blockstore.put(cidToKey(cid), buf) + + return node.toDAGLink({ + name: '', + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + } + + const emptyBlock = await putNode( + new DAGNode(), + 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' + ) + const bucket = new Array(DEFAULT_FANOUT).fill(0).map(() => new DAGLink('', 1, emptyBlock.Hash)) + const directLinks = await putNode( + new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), bucket), + 'QmbxHkprr5qdLSK8EZWdBzKFzNXGoKrxb7A4PHX3eH6JPp' + ) + const recursiveLinks = await putNode( + new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), [ 
+ ...bucket, + new DAGLink('', 1, pinnedCid) + ]), + 'QmdEtks1KYQsrgJ8FXpP1vXygnVHSqnyFTKQ3wcWVd4D2y' + ) + + const pinRoot = await putNode( + new DAGNode(Buffer.alloc(0), [ + new DAGLink('direct', directLinks.Tsize, directLinks.Hash), + new DAGLink('recursive', recursiveLinks.Tsize, recursiveLinks.Hash) + ]), + pinRootCid.toString() + ) + + await blockstore.close() + await datastore.put(PIN_DS_KEY, pinRoot.Hash.multihash) + await datastore.close() +} + +module.exports = (setup, cleanup) => { + describe('migration 9', () => { + let dir + let datastore + let blockstore + let pinstore + + beforeEach(async () => { + dir = await setup() + + blockstore = await createStore(dir, 'blocks') + datastore = await createStore(dir, 'datastore') + pinstore = await createStore(dir, 'pins') + }) + + afterEach(async () => { + await pinstore.close() + await datastore.close() + await blockstore.close() + + cleanup(dir) + }) + + it('should migrate pins forward', async () => { + await bootstrapBlocks(blockstore, datastore) + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() + + await migration.migrate(dir) + await pinstore.open() + + const pins = await all(pinstore.query({})) + expect(pins).to.have.lengthOf(1) + + const key = pins[0].key + expect(keyToCid(key).toString()).to.equal(pinnedCid.toString()) + + const pin = cbor.decode(pins[0].value) + expect(pin.type).to.equal(PinTypes.recursive) + + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false() + }) + + it('should migrate pins backward', async () => { + await pinstore.open() + pinstore.put(cidToKey(pinnedCid), cbor.encode({ + cid: pinnedCid.buffer, + type: PinTypes.recursive, + metadata: { + foo: 'bar' + } + })) + await pinstore.close() + + await migration.revert(dir) + + await datastore.open() + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() + + const buf = await datastore.get(PIN_DS_KEY) + const cid = new CID(buf) + + expect(cid.toString()).to.equal(pinRootCid.toString()) + }) + }) +} diff --git a/test/node.js b/test/node.js index 584275d..8c44fd8 100644 --- a/test/node.js +++ b/test/node.js @@ -45,6 +45,7 @@ describe('Node specific tests', () => { describe('migrations tests', () => { require('./migrations/migration-8-test')(createRepo, repoCleanup) + require('./migrations/migration-9-test')(createRepo, repoCleanup) }) describe('init tests', () => { From 85d7f4caf8e2b3a7ed87c944c76e7abab67ba0e7 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 6 Jul 2020 18:40:47 +0100 Subject: [PATCH 2/8] chore: update to latest pin spec --- migrations/migration-9/index.js | 46 +++++++++++++++++++---------- test/migrations/migration-9-test.js | 6 ++-- 2 files changed, 33 insertions(+), 19 deletions(-) diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index 9edecc0..12a7f66 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -4,6 +4,7 @@ const CID = require('cids') const dagpb = require('ipld-dag-pb') const cbor = require('cbor') const multicodec = require('multicodec') +const multibase = require('multibase') const pinset = require('./pin-set') const { createStore, cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') @@ -15,17 +16,33 @@ async function pinsToDatastore (blockstore, datastore, pinstore) { const pinRoot = dagpb.util.deserialize(pinRootBuf) for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { - await pinstore.put(cidToKey(cid), cbor.encode({ - cid: cid.buffer, - type: PinTypes.recursive - })) + const pin = {} + + if 
(cid.version !== 0) {
+      pin.version = cid.version
+    }
+
+    if (cid.codec !== 'dag-pb') {
+      pin.codec = multicodec.getNumber(cid.codec)
+    }
+
+    await pinstore.put(cidToKey(cid), cbor.encode(pin))
   }
 
   for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) {
-    await pinstore.put(cidToKey(cid), cbor.encode({
-      cid: cid.buffer,
-      type: PinTypes.direct
-    }))
+    const pin = {
+      depth: 1
+    }
+
+    if (cid.version !== 0) {
+      pin.version = cid.version
+    }
+
+    if (cid.codec !== 'dag-pb') {
+      pin.codec = multicodec.getNumber(cid.codec)
+    }
+
+    await pinstore.put(cidToKey(cid), cbor.encode(pin))
   }
 
   await blockstore.delete(cidToKey(cid))
@@ -36,15 +53,14 @@ async function pinsToDAG (blockstore, datastore, pinstore) {
   let recursivePins = []
   let directPins = []
 
-  for await (const { value } of pinstore.query({})) {
+  for await (const { key, value } of pinstore.query({})) {
     const pin = cbor.decode(value)
+    const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().substring(1)))
 
-    if (pin.type === PinTypes.recursive) {
-      recursivePins.push(new CID(pin.cid))
-    }
-
-    if (pin.type === PinTypes.direct) {
-      directPins.push(new CID(pin.cid))
+    if (pin.depth === 1) {
+      directPins.push(cid)
+    } else {
+      recursivePins.push(cid)
     }
   }
 
diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js
index 51dc0f5..3018954 100644
--- a/test/migrations/migration-9-test.js
+++ b/test/migrations/migration-9-test.js
@@ -14,7 +14,7 @@ const all = require('it-all')
 const cbor = require('cbor')
 
 const migration = require('../../migrations/migration-9')
-const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT, PinTypes } = require('../../migrations/migration-9/utils')
+const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT } = require('../../migrations/migration-9/utils')
 const CID = require('cids')
 
 function keyToCid (key) {
@@ -110,7 +110,7 @@ module.exports = (setup, cleanup) => {
     expect(keyToCid(key).toString()).to.equal(pinnedCid.toString())
 
     const pin = cbor.decode(pins[0].value)
-    expect(pin.type).to.equal(PinTypes.recursive)
+    expect(pin.depth).to.be.undefined()
 
     await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false()
   })
@@ -118,8 +118,6 @@ module.exports = (setup, cleanup) => {
   it('should migrate pins backward', async () => {
     await pinstore.open()
     pinstore.put(cidToKey(pinnedCid), cbor.encode({
-      cid: pinnedCid.buffer,
-      type: PinTypes.recursive,
       metadata: {
         foo: 'bar'
       }

From 14082a976f91ba4d9352e4f63785d6679d2fc7bb Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 16 Jul 2020 20:59:19 +0100
Subject: [PATCH 3/8] chore: make direct pins depth = 0

---
 migrations/migration-9/index.js     |  4 +-
 test/browser.js                     |  1 +
 test/migrations/migration-9-test.js | 68 ++++++++++++++++++-----------
 3 files changed, 45 insertions(+), 28 deletions(-)

diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js
index 12a7f66..b62d913 100644
--- a/migrations/migration-9/index.js
+++ b/migrations/migration-9/index.js
@@ -31,7 +31,7 @@ async function pinsToDatastore (blockstore, datastore, pinstore) {
 
   for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) {
     const pin = {
-      depth: 1
+      depth: 0
     }
 
     if (cid.version !== 0) {
@@ -57,7 +57,7 @@ async function pinsToDAG (blockstore, datastore, pinstore) {
     const pin = cbor.decode(value)
     const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().substring(1)))
 
-    if (pin.depth === 
1) { + if (pin.depth === 0) { directPins.push(cid) } else { recursivePins.push(cid) diff --git a/test/browser.js b/test/browser.js index a8f55dd..bae900c 100644 --- a/test/browser.js +++ b/test/browser.js @@ -46,6 +46,7 @@ describe('Browser specific tests', () => { describe('migrations tests', () => { require('./migrations/migration-8-test')(createRepo, repoCleanup) + require('./migrations/migration-9-test')(createRepo, repoCleanup) }) describe('init tests', () => { diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 3018954..7e7c929 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -96,43 +96,59 @@ module.exports = (setup, cleanup) => { cleanup(dir) }) - it('should migrate pins forward', async () => { - await bootstrapBlocks(blockstore, datastore) - await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() + describe('forwards', () => { + beforeEach(async () => { + await blockstore.open() + await bootstrapBlocks(blockstore, datastore) - await migration.migrate(dir) - await pinstore.open() + await datastore.open() + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() - const pins = await all(pinstore.query({})) - expect(pins).to.have.lengthOf(1) + await blockstore.close() + await datastore.close() + }) - const key = pins[0].key - expect(keyToCid(key).toString()).to.equal(pinnedCid.toString()) + it('should migrate pins forward', async () => { + await migration.migrate(dir) - const pin = cbor.decode(pins[0].value) - expect(pin.depth).to.be.undefined() + await pinstore.open() - await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false() + const pins = await all(pinstore.query({})) + expect(pins).to.have.lengthOf(1) + + const key = pins[0].key + expect(keyToCid(key).toString()).to.equal(pinnedCid.toString()) + + const pin = cbor.decode(pins[0].value) + expect(pin.depth).to.be.undefined() + + await datastore.open() + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.false() + }) }) - it('should migrate pins backward', async () => { - await pinstore.open() - pinstore.put(cidToKey(pinnedCid), cbor.encode({ - metadata: { - foo: 'bar' - } - })) - await pinstore.close() + describe('backwards', () => { + beforeEach(async () => { + await pinstore.open() + pinstore.put(cidToKey(pinnedCid), cbor.encode({ + metadata: { + foo: 'bar' + } + })) + await pinstore.close() + }) - await migration.revert(dir) + it('should migrate pins backward', async () => { + await migration.revert(dir) - await datastore.open() - await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() + await datastore.open() + await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() - const buf = await datastore.get(PIN_DS_KEY) - const cid = new CID(buf) + const buf = await datastore.get(PIN_DS_KEY) + const cid = new CID(buf) - expect(cid.toString()).to.equal(pinRootCid.toString()) + expect(cid.toString()).to.equal(pinRootCid.toString()) + }) }) }) } From d28eb2c31685dac1e4365646cfa3de59ac0222c3 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 16 Jul 2020 21:57:48 +0100 Subject: [PATCH 4/8] chore: add sharding tests --- test/browser.js | 5 +-- test/migrations/index.js | 65 +++++++++++++++++++++++++++++ test/migrations/migration-8-test.js | 53 +++++++---------------- test/migrations/migration-9-test.js | 15 ++++--- test/node.js | 5 +-- 5 files changed, 92 insertions(+), 51 deletions(-) create mode 100644 test/migrations/index.js diff --git a/test/browser.js b/test/browser.js index bae900c..5ea35cd 100644 --- 
a/test/browser.js +++ b/test/browser.js @@ -44,9 +44,8 @@ describe('Browser specific tests', () => { require('./version-test')(createRepo, repoCleanup) }) - describe('migrations tests', () => { - require('./migrations/migration-8-test')(createRepo, repoCleanup) - require('./migrations/migration-9-test')(createRepo, repoCleanup) + describe('migration tests', () => { + require('./migrations')(createRepo, repoCleanup) }) describe('init tests', () => { diff --git a/test/migrations/index.js b/test/migrations/index.js new file mode 100644 index 0000000..895d70c --- /dev/null +++ b/test/migrations/index.js @@ -0,0 +1,65 @@ +/* eslint-env mocha */ +'use strict' + +const CONFIGURATIONS = [{ + name: 'with sharding', + options: { + storageBackendOptions: { + root: { + sharding: true, + extension: '.data' + }, + blocks: { + sharding: true, + extension: '.data' + }, + datastore: { + sharding: true, + extension: '.data' + }, + keys: { + sharding: true, + extension: '.data' + }, + pinstore: { + sharding: true, + extension: '.data' + } + } + } +}, { + name: 'without sharding', + options: { + storageBackendOptions: { + root: { + sharding: false, + extension: '.data' + }, + blocks: { + sharding: false, + extension: '.data' + }, + datastore: { + sharding: false, + extension: '.data' + }, + keys: { + sharding: false, + extension: '.data' + }, + pinstore: { + sharding: false, + extension: '.data' + } + } + } +}] + +module.exports = (createRepo, repoCleanup) => { + CONFIGURATIONS.forEach(({ name, options }) => { + describe(name, () => { + require('./migration-8-test')(createRepo, repoCleanup, options) + require('./migration-9-test')(createRepo, repoCleanup, options) + }) + }) +} diff --git a/test/migrations/migration-8-test.js b/test/migrations/migration-8-test.js index b976837..c75c606 100644 --- a/test/migrations/migration-8-test.js +++ b/test/migrations/migration-8-test.js @@ -101,49 +101,28 @@ async function validateBlocks (dir, shouldBeEncoded, options) { await baseStore.close() } -const CONFIGURATIONS = [{ - name: 'with block sharding', - options: { - storageBackendOptions: { - blocks: { - sharding: true, - extension: '.data' - } - } - } -}, { - name: 'without block sharding', - options: { - storageBackendOptions: { - blocks: { - sharding: false, - extension: '.data' - } - } - } -}] - -module.exports = (setup, cleanup) => { +module.exports = (setup, cleanup, options) => { describe('migration 8', () => { let dir beforeEach(async () => { dir = await setup() }) - afterEach(() => cleanup(dir)) - - CONFIGURATIONS.forEach(({ name, options }) => { - it(`should migrate blocks forward ${name}`, async () => { - await bootstrapBlocks(dir, false, options.storageBackendOptions.blocks) - await migration.migrate(dir, options) - await validateBlocks(dir, true, options.storageBackendOptions.blocks) - }) - - it(`should migrate blocks backward ${name}`, async () => { - await bootstrapBlocks(dir, true, options.storageBackendOptions.blocks) - await migration.revert(dir, options) - await validateBlocks(dir, false, options.storageBackendOptions.blocks) - }) + + afterEach(async () => { + await cleanup(dir) + }) + + it('should migrate blocks forward', async () => { + await bootstrapBlocks(dir, false, options.storageBackendOptions.blocks) + await migration.migrate(dir, options) + await validateBlocks(dir, true, options.storageBackendOptions.blocks) + }) + + it('should migrate blocks backward', async () => { + await bootstrapBlocks(dir, true, options.storageBackendOptions.blocks) + await migration.revert(dir, options) + await 
validateBlocks(dir, false, options.storageBackendOptions.blocks) }) }) } diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 7e7c929..7f20b3d 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -18,7 +18,7 @@ const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT } = require('../../mig const CID = require('cids') function keyToCid (key) { - const buf = Buffer.from(multibase.encoding('base32upper').decode(key.toString().substring(1))) + const buf = Buffer.from(multibase.encoding('base32upper').decode(key.toString().split('/').pop())) return new CID(buf) } @@ -73,7 +73,7 @@ async function bootstrapBlocks (blockstore, datastore) { await datastore.close() } -module.exports = (setup, cleanup) => { +module.exports = (setup, cleanup, options) => { describe('migration 9', () => { let dir let datastore @@ -83,9 +83,9 @@ module.exports = (setup, cleanup) => { beforeEach(async () => { dir = await setup() - blockstore = await createStore(dir, 'blocks') - datastore = await createStore(dir, 'datastore') - pinstore = await createStore(dir, 'pins') + blockstore = await createStore(dir, 'blocks', options) + datastore = await createStore(dir, 'datastore', options) + pinstore = await createStore(dir, 'pins', options) }) afterEach(async () => { @@ -109,7 +109,7 @@ module.exports = (setup, cleanup) => { }) it('should migrate pins forward', async () => { - await migration.migrate(dir) + await migration.migrate(dir, options) await pinstore.open() @@ -139,14 +139,13 @@ module.exports = (setup, cleanup) => { }) it('should migrate pins backward', async () => { - await migration.revert(dir) + await migration.revert(dir, options) await datastore.open() await expect(datastore.has(PIN_DS_KEY)).to.eventually.be.true() const buf = await datastore.get(PIN_DS_KEY) const cid = new CID(buf) - expect(cid.toString()).to.equal(pinRootCid.toString()) }) }) diff --git a/test/node.js b/test/node.js index 8c44fd8..f515c96 100644 --- a/test/node.js +++ b/test/node.js @@ -43,9 +43,8 @@ describe('Node specific tests', () => { require('./version-test')(createRepo, repoCleanup) }) - describe('migrations tests', () => { - require('./migrations/migration-8-test')(createRepo, repoCleanup) - require('./migrations/migration-9-test')(createRepo, repoCleanup) + describe('migration tests', () => { + require('./migrations')(createRepo, repoCleanup) }) describe('init tests', () => { From e6e65dd5496713d1b006a2936e11663fb96288ba Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 16 Jul 2020 22:19:42 +0100 Subject: [PATCH 5/8] chore: add missing await --- migrations/migration-9/index.js | 2 +- test/migrations/migration-9-test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/migrations/migration-9/index.js b/migrations/migration-9/index.js index b62d913..e9d1785 100644 --- a/migrations/migration-9/index.js +++ b/migrations/migration-9/index.js @@ -55,7 +55,7 @@ async function pinsToDAG (blockstore, datastore, pinstore) { for await (const { key, value } of pinstore.query({})) { const pin = cbor.decode(value) - const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().substring(1))) + const cid = new CID(pin.version || 0, pin.codec && multicodec.getName(pin.codec) || 'dag-pb', multibase.decode('b' + key.toString().split('/').pop())) if (pin.depth === 0) { directPins.push(cid) diff --git a/test/migrations/migration-9-test.js b/test/migrations/migration-9-test.js index 
7f20b3d..9a60b36 100644 --- a/test/migrations/migration-9-test.js +++ b/test/migrations/migration-9-test.js @@ -130,7 +130,7 @@ module.exports = (setup, cleanup, options) => { describe('backwards', () => { beforeEach(async () => { await pinstore.open() - pinstore.put(cidToKey(pinnedCid), cbor.encode({ + await pinstore.put(cidToKey(pinnedCid), cbor.encode({ metadata: { foo: 'bar' } From 495cc7a198ab995a9be34c381ed04f8ef9d0bdfd Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 17 Jul 2020 07:05:06 +0100 Subject: [PATCH 6/8] chore: update versions matrix --- README.md | 51 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index eaf75e2..6963bb2 100644 --- a/README.md +++ b/README.md @@ -21,26 +21,35 @@ This package is inspired by the [go-ipfs repo migration tool](https://github.com ## Table of Contents -- [Background](#background) -- [Install](#install) - - [npm](#npm) - - [Use in Node.js](#use-in-nodejs) - - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) - - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag) -- [Usage](#usage) -- [API](#api) -- [CLI](#cli) -- [Creating a new migration](#creating-a-new-migration) - - [Architecture of a migration](#architecture-of-a-migration) - - [Browser vs. NodeJS environments](#browser-vs-nodejs-environments) - - [Guidelines](#guidelines) - - [Integration with js-ipfs](#integration-with-js-ipfs) - - [Empty migrations](#empty-migrations) - - [Migrations matrix](#migrations-matrix) -- [Developer](#developer) +- [Migration tool for JS IPFS Repo](#migration-tool-for-js-ipfs-repo) + - [Lead Maintainer](#lead-maintainer) + - [Table of Contents](#table-of-contents) + - [Background](#background) + - [Install](#install) + - [npm](#npm) + - [Use in Node.js](#use-in-nodejs) + - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) + - [Usage](#usage) + - [API](#api) + - [`.migrate(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#migratepath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) + - [`onProgress(migration, counter, totalMigrations)`](#onprogressmigration-counter-totalmigrations) + - [`.revert(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#revertpath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) + - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int) + - [CLI](#cli) + - [Creating a new migration](#creating-a-new-migration) + - [Architecture of a migration](#architecture-of-a-migration) + - [`.migrate(repoPath, repoOptions)`](#migraterepopath-repooptions) + - [`.revert(repoPath, repoOptions)`](#revertrepopath-repooptions) + - [Browser vs. NodeJS environments](#browser-vs-nodejs-environments) + - [Guidelines](#guidelines) + - [Integration with js-ipfs](#integration-with-js-ipfs) + - [Tests](#tests) + - [Empty migrations](#empty-migrations) + - [Migrations matrix](#migrations-matrix) + - [Developer](#developer) - [Module versioning notes](#module-versioning-notes) -- [Contribute](#contribute) -- [License](#license) + - [Contribute](#contribute) + - [License](#license) ## Background @@ -266,7 +275,9 @@ This will create an empty migration with the next version. 
| IPFS repo version | JS IPFS version | | -----------------: |:----------------:| -| 7 | v0.0.0 - latest | +| 7 | v0.0.0 | +| 8 | v0.48.0 | +| 9 | v0.49.0 | ## Developer From e3131c564f75e3770807b4fba501e06717856b3f Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 17 Jul 2020 07:46:54 +0100 Subject: [PATCH 7/8] chore: update readme --- README.md | 69 ++++++++++++++++++++++++++----------------------------- 1 file changed, 33 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index 7f5909d..edee377 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Migration tool for JS IPFS Repo +# Migration tool for JS IPFS Repo [![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) [![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations) @@ -15,41 +15,38 @@ This package is inspired by the [go-ipfs repo migration tool](https://github.com/ipfs/fs-repo-migrations/) -## Lead Maintainer - -[Adam Uhlíř](https://github.com/auhau/) - -## Table of Contents - -- [Migration tool for JS IPFS Repo](#migration-tool-for-js-ipfs-repo) - - [Lead Maintainer](#lead-maintainer) - - [Table of Contents](#table-of-contents) - - [Background](#background) - - [Install](#install) - - [npm](#npm) - - [Use in Node.js](#use-in-nodejs) - - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) - - [Usage](#usage) - - [API](#api) - - [`.migrate(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#migratepath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) - - [`onProgress(migration, counter, totalMigrations)`](#onprogressmigration-counter-totalmigrations) - - [`.revert(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#revertpath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) - - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int) - - [CLI](#cli) - - [Creating a new migration](#creating-a-new-migration) - - [Architecture of a migration](#architecture-of-a-migration) - - [`.migrate(repoPath, repoOptions)`](#migraterepopath-repooptions) - - [`.revert(repoPath, repoOptions)`](#revertrepopath-repooptions) - - [Browser vs. 
NodeJS environments](#browser-vs-nodejs-environments) - - [Guidelines](#guidelines) - - [Integration with js-ipfs](#integration-with-js-ipfs) - - [Tests](#tests) - - [Empty migrations](#empty-migrations) - - [Migrations matrix](#migrations-matrix) - - [Developer](#developer) - - [Module versioning notes](#module-versioning-notes) - - [Contribute](#contribute) - - [License](#license) +## Lead Maintainer + +[Alex Potsides](http://github.com/achingbrain) + +## Table of Contents + +- [Background](#background) +- [Install](#install) + - [npm](#npm) + - [Use in Node.js](#use-in-nodejs) + - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) +- [Usage](#usage) +- [API](#api) + - [`.migrate(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#migratepath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) + - [`onProgress(migration, counter, totalMigrations)`](#onprogressmigration-counter-totalmigrations) + - [`.revert(path, toVersion, {ignoreLock, repoOptions, onProgress, isDryRun}) -> Promise`](#revertpath-toversion-ignorelock-repooptions-onprogress-isdryrun---promisevoid) + - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int) +- [CLI](#cli) +- [Creating a new migration](#creating-a-new-migration) + - [Architecture of a migration](#architecture-of-a-migration) + - [`.migrate(repoPath, repoOptions)`](#migraterepopath-repooptions) + - [`.revert(repoPath, repoOptions)`](#revertrepopath-repooptions) + - [Browser vs. NodeJS environments](#browser-vs-nodejs-environments) + - [Guidelines](#guidelines) + - [Integration with js-ipfs](#integration-with-js-ipfs) + - [Tests](#tests) + - [Empty migrations](#empty-migrations) + - [Migrations matrix](#migrations-matrix) +- [Developer](#developer) + - [Module versioning notes](#module-versioning-notes) +- [Contribute](#contribute) +- [License](#license) ## Background From 3461e7ce386e0a1eb7db07012037850398b073fe Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 17 Jul 2020 08:55:40 +0100 Subject: [PATCH 8/8] chore: update node polyfills --- .aegir.js | 5 ++++- migrations/migration-9/utils.js | 3 +-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.aegir.js b/.aegir.js index f176614..4d0b129 100644 --- a/.aegir.js +++ b/.aegir.js @@ -4,7 +4,10 @@ module.exports = { webpack: { node: { // this is needed until level stops using node buffers in browser code - Buffer: true + Buffer: true, + + // needed by cbor, binary-parse-stream and nofilter + stream: true } } } diff --git a/migrations/migration-9/utils.js b/migrations/migration-9/utils.js index 22b624f..893bba8 100644 --- a/migrations/migration-9/utils.js +++ b/migrations/migration-9/utils.js @@ -1,6 +1,5 @@ 'use strict' -const path = require('path') const core = require('datastore-core') const ShardingStore = core.ShardingDatastore const utils = require('../../src/utils') @@ -38,7 +37,7 @@ async function maybeWithSharding (filestore, options) { const createStore = async (location, name, options) => { const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, name) - let store = new StorageBackend(path.join(location, name), storageOptions) + let store = new StorageBackend(`${location}/${name}`, storageOptions) store = maybeWithSharding(store, storageOptions) return store
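After this series a pin is no longer a DAGLink under the '/local/pins' root DAG but a cbor-encoded record in the 'pins' datastore, keyed by '/<base32upper(multihash)>', with depth, version and codec omitted when they hold their defaults (recursive, CIDv0, dag-pb). A minimal sketch of enumerating the migrated records, assuming the dependency versions from package.json above and reusing the migration's createStore helper (listPins itself is illustrative, not part of the patches):

    'use strict'

    const cbor = require('cbor')
    const multibase = require('multibase')
    const multicodec = require('multicodec')
    const CID = require('cids')
    const { createStore } = require('./migrations/migration-9/utils')

    async function listPins (repoPath, options = {}) {
      const pinstore = await createStore(repoPath, 'pins', options)
      await pinstore.open()

      try {
        for await (const { key, value } of pinstore.query({})) {
          const pin = cbor.decode(value)

          // keys look like '/CIQ...' - see cidToKey in utils.js
          const mh = Buffer.from(multibase.encoding('base32upper').decode(key.toString().split('/').pop()))

          // absent fields mean defaults: a recursive pin of a CIDv0 dag-pb block
          const codec = pin.codec ? multicodec.getName(pin.codec) : 'dag-pb'
          const cid = new CID(pin.version || 0, codec, mh)

          console.info(cid.toString(), pin.depth === 0 ? 'direct' : 'recursive')
        }
      } finally {
        await pinstore.close()
      }
    }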