Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 8778c81

Browse files
committed
feat: migrate exporter to use IPLD Resolver
1 parent bffeaad commit 8778c81

File tree

6 files changed

+38
-29
lines changed

6 files changed

+38
-29
lines changed

package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
"name": "ipfs-unixfs-engine",
33
"version": "0.11.4",
44
"description": "JavaScript implementation of the unixfs Engine used by IPFS",
5-
"main": "lib/index.js",
5+
"main": "src/index.js",
66
"jsnext:main": "src/index.js",
77
"scripts": {
88
"lint": "aegir-lint",

src/exporter.js

+7-6
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
const traverse = require('pull-traverse')
44
const pull = require('pull-stream')
5+
const CID = require('cids')
56

67
const util = require('./util')
78
const switchType = util.switchType
@@ -10,29 +11,29 @@ const cleanMultihash = util.cleanMultihash
1011
const dirExporter = require('./exporters/dir')
1112
const fileExporter = require('./exporters/file')
1213

13-
module.exports = (hash, dagService, options) => {
14+
module.exports = (hash, ipldResolver, options) => {
1415
hash = cleanMultihash(hash)
1516
options = options || {}
1617

1718
function visitor (item) {
1819
return pull(
19-
dagService.getStream(item.hash),
20+
ipldResolver.getStream(new CID(item.hash)),
2021
pull.map((node) => switchType(
2122
node,
22-
() => dirExporter(node, item.path, dagService),
23-
() => fileExporter(node, item.path, dagService)
23+
() => dirExporter(node, item.path, ipldResolver),
24+
() => fileExporter(node, item.path, ipldResolver)
2425
)),
2526
pull.flatten()
2627
)
2728
}
2829

2930
// Traverse the DAG
3031
return pull(
31-
dagService.getStream(hash),
32+
ipldResolver.getStream(new CID(hash)),
3233
pull.map((node) => switchType(
3334
node,
3435
() => traverse.widthFirst({path: hash, hash}, visitor),
35-
() => fileExporter(node, hash, dagService)
36+
() => fileExporter(node, hash, ipldResolver)
3637
)),
3738
pull.flatten()
3839
)

src/exporters/dir.js

+4-3
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,13 @@
33
const path = require('path')
44
const pull = require('pull-stream')
55
const paramap = require('pull-paramap')
6+
const CID = require('cids')
67

78
const fileExporter = require('./file')
89
const switchType = require('../util').switchType
910

1011
// Logic to export a unixfs directory.
11-
module.exports = (node, name, dagService) => {
12+
module.exports = (node, name, ipldResolver) => {
1213
// The algorithm below is as follows
1314
//
1415
// 1. Take all links from a given directory node
@@ -25,15 +26,15 @@ module.exports = (node, name, dagService) => {
2526
path: path.join(name, link.name),
2627
hash: link.hash
2728
})),
28-
paramap((item, cb) => dagService.get(item.hash, (err, n) => {
29+
paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
2930
if (err) {
3031
return cb(err)
3132
}
3233

3334
cb(null, switchType(
3435
n,
3536
() => pull.values([item]),
36-
() => fileExporter(n, item.path, dagService)
37+
() => fileExporter(n, item.path, ipldResolver)
3738
))
3839
})),
3940
pull.flatten()

src/exporters/file.js

+3-2
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,12 @@
22

33
const traverse = require('pull-traverse')
44
const UnixFS = require('ipfs-unixfs')
5+
const CID = require('cids')
56
const pull = require('pull-stream')
67
const paramap = require('pull-paramap')
78

89
// Logic to export a single (possibly chunked) unixfs file.
9-
module.exports = (node, name, ds) => {
10+
module.exports = (node, name, ipldResolver) => {
1011
function getData (node) {
1112
try {
1213
const file = UnixFS.unmarshal(node.data)
@@ -19,7 +20,7 @@ module.exports = (node, name, ds) => {
1920
function visitor (node) {
2021
return pull(
2122
pull.values(node.links),
22-
paramap((link, cb) => ds.get(link.hash, cb))
23+
paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
2324
)
2425
}
2526

test/test-exporter.js

+22-16
Original file line numberDiff line numberDiff line change
@@ -3,38 +3,42 @@
33

44
const expect = require('chai').expect
55
const BlockService = require('ipfs-block-service')
6-
const DAGService = require('ipld-resolver')
6+
const IPLDResolver = require('ipld-resolver')
77
const UnixFS = require('ipfs-unixfs')
88
const fs = require('fs')
99
const path = require('path')
1010
const bs58 = require('bs58')
1111
const pull = require('pull-stream')
1212
const zip = require('pull-zip')
13+
const CID = require('cids')
1314

1415
const unixFSEngine = require('./../src')
1516
const exporter = unixFSEngine.exporter
1617

1718
module.exports = (repo) => {
1819
describe('exporter', () => {
19-
let ds
20+
let ipldResolver
2021

2122
const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))
2223
before(() => {
2324
const bs = new BlockService(repo)
24-
ds = new DAGService(bs)
25+
ipldResolver = new IPLDResolver(bs)
2526
})
2627

2728
it('import and export', (done) => {
2829
pull(
2930
pull.values([{
3031
path: '1.2MiB.txt',
31-
content: pull.values([bigFile, Buffer('hello world')])
32+
content: pull.values([
33+
bigFile,
34+
Buffer('hello world')
35+
])
3236
}]),
33-
unixFSEngine.importer(ds),
37+
unixFSEngine.importer(ipldResolver),
3438
pull.map((file) => {
3539
expect(file.path).to.be.eql('1.2MiB.txt')
3640

37-
return exporter(file.multihash, ds)
41+
return exporter(file.multihash, ipldResolver)
3842
}),
3943
pull.flatten(),
4044
pull.collect((err, files) => {
@@ -50,15 +54,15 @@ module.exports = (repo) => {
5054
const mhBuf = new Buffer(bs58.decode(hash))
5155

5256
pull(
53-
ds.getStream(hash),
57+
ipldResolver.getStream(new CID(hash)),
5458
pull.map((node) => UnixFS.unmarshal(node.data)),
5559
pull.collect((err, nodes) => {
5660
expect(err).to.not.exist
5761

5862
const unmarsh = nodes[0]
5963

6064
pull(
61-
exporter(mhBuf, ds),
65+
exporter(mhBuf, ipldResolver),
6266
pull.collect(onFiles)
6367
)
6468

@@ -79,10 +83,10 @@ module.exports = (repo) => {
7983
pull(
8084
zip(
8185
pull(
82-
ds.getStream(hash),
86+
ipldResolver.getStream(new CID(hash)),
8387
pull.map((node) => UnixFS.unmarshal(node.data))
8488
),
85-
exporter(hash, ds)
89+
exporter(hash, ipldResolver)
8690
),
8791
pull.collect((err, values) => {
8892
expect(err).to.not.exist
@@ -97,7 +101,7 @@ module.exports = (repo) => {
97101
it('export a small file with links', (done) => {
98102
const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q'
99103
pull(
100-
exporter(hash, ds),
104+
exporter(hash, ipldResolver),
101105
pull.collect((err, files) => {
102106
expect(err).to.not.exist
103107

@@ -109,7 +113,7 @@ module.exports = (repo) => {
109113
it('export a large file > 5mb', (done) => {
110114
const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
111115
pull(
112-
exporter(hash, ds),
116+
exporter(hash, ipldResolver),
113117
pull.collect((err, files) => {
114118
expect(err).to.not.exist
115119

@@ -123,7 +127,7 @@ module.exports = (repo) => {
123127
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
124128

125129
pull(
126-
exporter(hash, ds),
130+
exporter(hash, ipldResolver),
127131
pull.collect((err, files) => {
128132
expect(err).to.not.exist
129133

@@ -162,7 +166,7 @@ module.exports = (repo) => {
162166
const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
163167

164168
pull(
165-
exporter(hash, ds),
169+
exporter(hash, ipldResolver),
166170
pull.collect((err, files) => {
167171
expect(err).to.not.exist
168172
expect(files[0].content).to.not.exist
@@ -176,7 +180,7 @@ module.exports = (repo) => {
176180
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'
177181

178182
pull(
179-
exporter(hash, ds),
183+
exporter(hash, ipldResolver),
180184
pull.collect((err, files) => {
181185
expect(err).to.exist
182186
done()
@@ -190,7 +194,9 @@ function fileEql (f1, f2, done) {
190194
pull(
191195
f1.content,
192196
pull.collect((err, data) => {
193-
if (err) return done(err)
197+
if (err) {
198+
return done(err)
199+
}
194200

195201
try {
196202
if (f2) {

test/test-importer.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ function stringifyMh (files) {
1818
}
1919

2020
module.exports = function (repo) {
21-
describe.only('importer', function () {
21+
describe('importer', function () {
2222
let ipldResolver
2323

2424
const bigFile = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))

0 commit comments

Comments (0)