Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit ef823e7

Browse files
dignifiedquire and daviddias
authored and committed
refactor: move to pull-streams
1 parent ec52fec commit ef823e7

File tree

133 files changed

+856
-811
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

133 files changed

+856
-811
lines changed

.travis.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ sudo: false
22
language: node_js
33
node_js:
44
- 4
5-
- 5
5+
- stable
66

77
# Make sure we have new NPM.
88
before_install:

package.json

+22-28
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,14 @@
22
"name": "ipfs-unixfs-engine",
33
"version": "0.10.2",
44
"description": "JavaScript implementation of the unixfs Engine used by IPFS",
5-
"main": "src/index.js",
5+
"main": "lib/index.js",
66
"jsnext:main": "src/index.js",
77
"scripts": {
88
"lint": "aegir-lint",
99
"build": "aegir-build",
1010
"test": "aegir-test",
11-
"test:node": "aegir-test node",
12-
"test:browser": "aegir-test browser",
11+
"test:node": "aegir-test --env node",
12+
"test:browser": "aegir-test --env browser",
1313
"release": "aegir-release",
1414
"release-minor": "aegir-release --type minor",
1515
"release-major": "aegir-release --type major",
@@ -22,7 +22,7 @@
2222
],
2323
"repository": {
2424
"type": "git",
25-
"url": "git+https://github.com/ipfs/js-ipfs-data-importing.git"
25+
"url": "git+https://github.com/ipfs/js-ipfs-unixfs-engine.git"
2626
},
2727
"keywords": [
2828
"IPFS"
@@ -34,37 +34,31 @@
3434
},
3535
"homepage": "https://github.com/ipfs/js-ipfs-unixfs-engineg#readme",
3636
"devDependencies": {
37-
"aegir": "^6.0.1",
38-
"async": "^2.0.1",
39-
"block-stream2": "^1.1.0",
37+
"aegir": "^8.0.0",
4038
"buffer-loader": "0.0.1",
4139
"chai": "^3.5.0",
42-
"concat-stream": "^1.5.1",
43-
"fs-blob-store": "^5.2.1",
44-
"idb-plus-blob-store": "^1.1.2",
45-
"ipfs-repo": "^0.7.5",
40+
"fs-pull-blob-store": "^0.3.0",
41+
"idb-pull-blob-store": "^0.4.0",
42+
"ipfs-block-service": "^0.5.0",
43+
"ipfs-repo": "^0.9.0",
4644
"ncp": "^2.0.0",
47-
"pre-commit": "^1.1.2",
45+
"pre-commit": "^1.1.3",
46+
"pull-zip": "^2.0.0",
4847
"raw-loader": "^0.5.1",
49-
"rimraf": "^2.5.1",
50-
"streamifier": "^0.1.1",
51-
"ipfs-block-service": "^0.3.0",
52-
"string-to-stream": "^1.0.1"
48+
"rimraf": "^2.5.4",
49+
"run-series": "^1.1.4"
5350
},
5451
"dependencies": {
55-
"block-stream2": "^1.1.0",
56-
"bs58": "^3.0.0",
57-
"debug": "^2.2.0",
58-
"field-trip": "0.0.3",
59-
"ipfs-merkle-dag": "^0.6.1",
60-
"ipfs-unixfs": "^0.1.0",
52+
"ipfs-merkle-dag": "^0.7.0",
53+
"ipfs-unixfs": "^0.1.4",
6154
"is-ipfs": "^0.2.0",
62-
"isstream": "^0.1.2",
6355
"multihashes": "^0.2.2",
64-
"readable-stream": "^1.1.13",
65-
"run-series": "^1.1.4",
66-
"streamifier": "^0.1.1",
67-
"through2": "^2.0.0"
56+
"pull-block": "^1.0.2",
57+
"pull-pushable": "^2.0.1",
58+
"pull-stream": "^3.4.5",
59+
"pull-traverse": "^1.0.3",
60+
"pull-write": "^1.1.0",
61+
"run-parallel": "^1.1.6"
6862
},
6963
"contributors": [
7064
"David Dias <[email protected]>",
@@ -75,4 +69,4 @@
7569
"greenkeeperio-bot <[email protected]>",
7670
"nginnever <[email protected]>"
7771
]
78-
}
72+
}

src/chunker-fixed-size.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
'use strict'

const block = require('pull-block')

// Fixed-size chunker: returns a pull-stream through-stream that
// re-slices the incoming buffers into chunks of exactly `size` bytes.
// The trailing chunk is emitted as-is (no zero padding up to `size`).
exports = module.exports = function (size) {
  const options = {zeroPadding: false}
  return block(size, options)
}

src/clean-multihash.js

-14
This file was deleted.

src/exporter.js

+30-114
Original file line numberDiff line numberDiff line change
@@ -1,124 +1,40 @@
11
'use strict'
22

3-
const debug = require('debug')
4-
const log = debug('unixfs')
5-
log.err = debug('unixfs:error')
63
const isIPFS = require('is-ipfs')
7-
const UnixFS = require('ipfs-unixfs')
8-
const series = require('run-series')
9-
const Readable = require('readable-stream').Readable
10-
const pathj = require('path')
11-
const util = require('util')
12-
const fieldtrip = require('field-trip')
13-
const cleanMultihash = require('./clean-multihash')
4+
const traverse = require('pull-traverse')
5+
const pull = require('pull-stream')
146

15-
exports = module.exports = Exporter
7+
const util = require('./util')
8+
const switchType = util.switchType
9+
const cleanMultihash = util.cleanMultihash
1610

17-
util.inherits(Exporter, Readable)
11+
const dirExporter = require('./exporters/dir')
12+
const fileExporter = require('./exporters/file')
1813

19-
function Exporter (hash, dagService, options) {
20-
if (!(this instanceof Exporter)) {
21-
return new Exporter(hash, dagService, options)
22-
}
23-
24-
// Sanitize hash
25-
if (!isIPFS.multihash(hash)) {
26-
throw new Error('not valid multihash')
27-
}
14+
module.exports = (hash, dagService, options) => {
2815
hash = cleanMultihash(hash)
29-
30-
Readable.call(this, { objectMode: true })
31-
32-
this.options = options || {}
33-
34-
this._read = (n) => {}
35-
36-
let fileExporter = (node, name, done) => {
37-
if (!done) {
38-
throw new Error('done must be set')
39-
}
40-
41-
const contentRS = new Readable()
42-
contentRS._read = () => {}
43-
44-
// Logic to export a single (possibly chunked) unixfs file.
45-
if (node.links.length === 0) {
46-
const unmarshaledData = UnixFS.unmarshal(node.data)
47-
contentRS.push(unmarshaledData.data)
48-
contentRS.push(null)
49-
this.push({ content: contentRS, path: name })
50-
done()
51-
} else {
52-
const array = node.links.map((link) => {
53-
return (cb) => {
54-
dagService.get(link.hash, (err, res) => {
55-
if (err) {
56-
return cb(err)
57-
}
58-
var unmarshaledData = UnixFS.unmarshal(res.data)
59-
contentRS.push(unmarshaledData.data)
60-
cb()
61-
})
62-
}
63-
})
64-
series(array, (err) => {
65-
if (err) {
66-
return contentRS.emit('error', err)
67-
}
68-
contentRS.push(null)
69-
})
70-
this.push({ content: contentRS, path: name })
71-
done()
72-
}
73-
}
74-
75-
// Logic to export a unixfs directory.
76-
let dirExporter = (node, name, add, done) => {
77-
if (!add) {
78-
throw new Error('add must be set')
79-
}
80-
if (!done) {
81-
throw new Error('done must be set')
82-
}
83-
84-
this.push({content: null, path: name})
85-
86-
// Directory has links
87-
if (node.links.length > 0) {
88-
node.links.forEach((link) => {
89-
add({ path: pathj.join(name, link.name), hash: link.hash })
90-
})
91-
}
92-
done()
93-
}
94-
95-
// Traverse the DAG asynchronously
96-
fieldtrip([{path: hash, hash: hash}], visit.bind(this), (err) => {
97-
if (err) {
98-
return this.emit('error', err)
99-
}
100-
this.push(null)
101-
})
102-
103-
// Visit function: called once per node in the exported graph
104-
function visit (item, add, done) {
105-
dagService.get(item.hash, (err, node) => {
106-
if (err) {
107-
return this.emit('error', err)
108-
}
109-
110-
const data = UnixFS.unmarshal(node.data)
111-
const type = data.type
112-
113-
if (type === 'directory') {
114-
dirExporter(node, item.path, add, done)
115-
}
116-
117-
if (type === 'file') {
118-
fileExporter(node, item.path, done)
119-
}
120-
})
16+
options = options || {}
17+
18+
function visitor (item) {
19+
return pull(
20+
dagService.getStream(item.hash),
21+
pull.map((node) => switchType(
22+
node,
23+
() => dirExporter(node, item.path, dagService),
24+
() => fileExporter(node, item.path, dagService)
25+
)),
26+
pull.flatten()
27+
)
12128
}
12229

123-
return this
30+
// Traverse the DAG
31+
return pull(
32+
dagService.getStream(hash),
33+
pull.map((node) => switchType(
34+
node,
35+
() => traverse.widthFirst({path: hash, hash}, visitor),
36+
() => fileExporter(node, hash, dagService)
37+
)),
38+
pull.flatten()
39+
)
12440
}

src/exporters/dir.js

+28
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
'use strict'
2+
3+
const path = require('path')
4+
const pull = require('pull-stream')
5+
6+
const fileExporter = require('./file')
7+
const switchType = require('../util').switchType
8+
9+
// Logic to export a unixfs directory.
10+
module.exports = (node, name, dagService) => {
11+
return pull(
12+
pull.values(node.links),
13+
pull.map((link) => ({
14+
path: path.join(name, link.name),
15+
hash: link.hash
16+
})),
17+
pull.map((item) => pull(
18+
dagService.getStream(item.hash),
19+
pull.map((n) => switchType(
20+
n,
21+
() => pull.values([item]),
22+
() => fileExporter(n, item.path, dagService)
23+
)),
24+
pull.flatten()
25+
)),
26+
pull.flatten()
27+
)
28+
}

src/exporters/file.js

+31
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
'use strict'
2+
3+
const UnixFS = require('ipfs-unixfs')
4+
const pull = require('pull-stream')
5+
6+
function extractContent (node) {
7+
const raw = UnixFS.unmarshal(node.data)
8+
return pull.values([raw.data])
9+
}
10+
11+
// Logic to export a single (possibly chunked) unixfs file.
12+
module.exports = (node, name, ds) => {
13+
let content
14+
15+
if (node.links.length === 0) {
16+
content = extractContent(node)
17+
} else {
18+
content = pull(
19+
pull.values(node.links),
20+
pull.map((link) => ds.getStream(link.hash)),
21+
pull.flatten(),
22+
pull.map(extractContent),
23+
pull.flatten()
24+
)
25+
}
26+
27+
return pull.values([{
28+
content: content,
29+
path: name
30+
}])
31+
}

0 commit comments

Comments
 (0)