diff --git a/package.json b/package.json
index 2f86566..bcc6e95 100644
--- a/package.json
+++ b/package.json
@@ -44,9 +44,9 @@
     "chai": "^4.2.0",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "ipld": "~0.24.0",
-    "ipld-dag-pb": "~0.17.1",
-    "ipld-in-memory": "^2.0.0",
+    "ipld": "^0.25.0",
+    "ipld-dag-pb": "^0.18.0",
+    "ipld-in-memory": "^3.0.0",
     "multicodec": "~0.5.1",
     "multihashes": "~0.4.14",
     "nyc": "^14.0.0",
diff --git a/test/exporter-sharded.spec.js b/test/exporter-sharded.spec.js
index fcfab68..1a66215 100644
--- a/test/exporter-sharded.spec.js
+++ b/test/exporter-sharded.spec.js
@@ -46,14 +46,8 @@ describe('exporter sharded', function () {
     }))).cid
   }
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('exports a sharded directory', async () => {
@@ -190,7 +184,7 @@ describe('exporter sharded', function () {
   it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => {
     const dirCid = await createShard(15)
 
-    const node = await DAGNode.create(new UnixFS('directory').marshal(), [
+    const node = new DAGNode(new UnixFS('directory').marshal(), [
       new DAGLink('shard', 5, dirCid)
     ])
     const nodeCid = await ipld.put(node, mc.DAG_PB, {
@@ -198,7 +192,7 @@
       hashAlg: mh.names['sha2-256']
     })
 
-    const shardNode = await DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
+    const shardNode = new DAGNode(new UnixFS('hamt-sharded-directory').marshal(), [
      new DAGLink('75normal-dir', 5, nodeCid)
     ])
     const shardNodeCid = await ipld.put(shardNode, mc.DAG_PB, {
diff --git a/test/exporter-subtree.spec.js b/test/exporter-subtree.spec.js
index 2d1e37b..3290607 100644
--- a/test/exporter-subtree.spec.js
+++ b/test/exporter-subtree.spec.js
@@ -19,14 +19,8 @@ const exporter = require('./../src')
 describe('exporter subtree', () => {
   let ipld
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('exports a file 2 levels down', async () => {
diff --git a/test/exporter.spec.js b/test/exporter.spec.js
index 48459ae..a82b6b6 100644
--- a/test/exporter.spec.js
+++ b/test/exporter.spec.js
@@ -40,7 +40,7 @@ describe('exporter', () => {
 
     const file = new UnixFS(options.type, options.content)
 
-    const node = await DAGNode.create(file.marshal(), options.links)
+    const node = new DAGNode(file.marshal(), options.links)
     const cid = await ipld.put(node, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
@@ -104,7 +104,7 @@
       links.push(new DAGLink('', child.node.size, child.cid))
     }
 
-    const node = await DAGNode.create(file.marshal(), links)
+    const node = new DAGNode(file.marshal(), links)
     const cid = await ipld.put(node, mc.DAG_PB, {
       cidVersion: 1,
       hashAlg: mh.names['sha2-256']
@@ -116,14 +116,8 @@
     }
   }
 
-  before((done) => {
-    inMemory(IPLD, (err, resolver) => {
-      expect(err).to.not.exist()
-
-      ipld = resolver
-
-      done()
-    })
+  before(async () => {
+    ipld = await inMemory(IPLD)
   })
 
   it('ensure hash inputs are sanitized', async () => {
@@ -197,14 +191,14 @@
   it('exports a small file with links', async () => {
     const content = Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
     const chunk1 = new UnixFS('raw', content.slice(0, 5))
-    const chunkNode1 = await DAGNode.create(chunk1.marshal())
+    const chunkNode1 = new DAGNode(chunk1.marshal())
     const chunkCid1 = await ipld.put(chunkNode1, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
     })
 
     const chunk2 = new UnixFS('raw', content.slice(5))
-    const chunkNode2 = await DAGNode.create(chunk2.marshal())
+    const chunkNode2 = new DAGNode(chunk2.marshal())
     const chunkCid2 = await ipld.put(chunkNode2, mc.DAG_PB, {
       cidVersion: 0,
       hashAlg: mh.names['sha2-256']
@@ -214,7 +208,7 @@
     file.addBlockSize(5)
     file.addBlockSize(5)
 
-    const fileNode = await DAGNode.create(file.marshal(), [
+    const fileNode = new DAGNode(file.marshal(), [
       new DAGLink('', chunkNode1.size, chunkCid1),
       new DAGLink('', chunkNode2.size, chunkCid2)
     ])
@@ -822,7 +816,7 @@
   })
 
   it('errors we export a non-unixfs dag-pb node', async () => {
-    const cid = await ipld.put(await DAGNode.create(Buffer.from([0, 1, 2, 3, 4])), mc.DAG_PB)
+    const cid = await ipld.put(new DAGNode(Buffer.from([0, 1, 2, 3, 4])), mc.DAG_PB)
 
     try {
       await exporter(cid, ipld)
@@ -839,7 +833,7 @@
     const file = new UnixFS('file')
     file.addBlockSize(100)
 
-    const cid = await ipld.put(await DAGNode.create(file.marshal(), [
+    const cid = await ipld.put(new DAGNode(file.marshal(), [
       new DAGLink('', 100, cborNodeCid)
     ]), mc.DAG_PB)