This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 51d1245

feat: upgrade to latest dag-pb API (#88)

1 parent 0e025dc

6 files changed (+80 -117 lines)
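The breaking change behind this commit is ipld-dag-pb 0.9: a DAGNode is no longer constructed and mutated directly, with its multihash and size then derived through separate async methods; it is produced in one step by the `DAGNode.create` factory, which precomputes both. A minimal before/after sketch of the pattern every file below migrates to (the data buffer is just an illustration):

```js
const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode

// Old style (ipld-dag-pb 0.8): build a mutable node, then derive its
// identity through separate async calls:
//
//   const n = new DAGNode(data)
//   n.addRawLink(link)
//   n.multihash((err, multihash) => { /* ... */ })
//   n.size((err, size) => { /* ... */ })

// New style (ipld-dag-pb 0.9): data and links go in up front, and the
// finished node comes back with .multihash and .size already attached.
DAGNode.create(new Buffer('hello world'), [], (err, node) => {
  if (err) {
    throw err
  }
  console.log(node.multihash) // multihash of the serialized node (a Buffer)
  console.log(node.size)      // cumulative size in bytes
})
```

Every hunk below is some variation of this swap, usually wrapped in `async/waterfall` so the store step can reuse the freshly created node.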

package.json (+4 -4)

```diff
@@ -52,15 +52,15 @@
     "rimraf": "^2.5.4"
   },
   "dependencies": {
-    "async": "^2.1.2",
+    "async": "^2.1.4",
     "cids": "^0.2.0",
     "ipfs-unixfs": "^0.1.5",
-    "ipld-dag-pb": "^0.8.0",
-    "ipld-resolver": "^0.2.0",
+    "ipld-dag-pb": "^0.9.1",
+    "ipld-resolver": "^0.3.0",
     "is-ipfs": "^0.2.1",
     "multihashes": "^0.2.2",
     "pull-block": "^1.0.2",
-    "pull-paramap": "^1.2.0",
+    "pull-paramap": "^1.2.1",
     "pull-pushable": "^2.0.1",
     "pull-stream": "^3.5.0",
     "pull-traverse": "^1.0.3",
```

src/exporter/dir.js (+1 -1)

```diff
@@ -24,7 +24,7 @@ module.exports = (node, name, ipldResolver) => {
     pull.values(node.links),
     pull.map((link) => ({
       path: path.join(name, link.name),
-      hash: link.hash
+      hash: link.multihash
     })),
     paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
       if (err) {
```

src/exporter/file.js (+1 -1)

```diff
@@ -20,7 +20,7 @@ module.exports = (node, name, ipldResolver) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ipldResolver.get(new CID(link.hash), cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
     )
   }
```
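Both exporter edits are the same rename: in the new dag-pb, a link's target is exposed as `link.multihash` rather than `link.hash`. A sketch of the resolve-the-children pattern the two files share, factored into a standalone helper (the helper name is ours; the pull/paramap/CID calls match the surrounding modules):

```js
const pull = require('pull-stream')
const paramap = require('pull-paramap')
const CID = require('cids')

// Fetch all children of a dag-pb node in parallel, wrapping each link's
// target multihash in a CID before handing it to the IPLD resolver.
function resolveChildren (node, ipldResolver, callback) {
  pull(
    pull.values(node.links),
    paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb)),
    pull.collect(callback)
  )
}
```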

src/importer/flush-tree.js (+25 -33)

```diff
@@ -5,8 +5,7 @@ const UnixFS = require('ipfs-unixfs')
 const CID = require('cids')
 const dagPB = require('ipld-dag-pb')
 const mapValues = require('async/mapValues')
-const parallel = require('async/parallel')
-
+const waterfall = require('async/waterfall')
 const DAGLink = dagPB.DAGLink
 const DAGNode = dagPB.DAGNode
 
@@ -120,44 +119,37 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
     // return this new node multihash
 
     const keys = Object.keys(tree)
-
-    const ufsDir = new UnixFS('directory')
-    const node = new DAGNode(ufsDir.marshal())
-
-    keys.forEach((key) => {
+    const dir = new UnixFS('directory')
+    const links = keys.map((key) => {
       const b58mh = mh.toB58String(tree[key])
-      const link = new DAGLink(key, sizeIndex[b58mh], tree[key])
-      node.addRawLink(link)
+      return new DAGLink(key, sizeIndex[b58mh], tree[key])
     })
 
-    parallel([
-      (cb) => node.multihash(cb),
-      (cb) => node.size(cb)
-    ], (err, res) => {
+    waterfall([
+      (cb) => DAGNode.create(dir.marshal(), links, cb),
+      (node, cb) => {
+        sizeIndex[mh.toB58String(node.multihash)] = node.size
+
+        ipldResolver.put({
+          node: node,
+          cid: new CID(node.multihash)
+        }, (err) => cb(err, node))
+      }
+    ], (err, node) => {
       if (err) {
+        source.push(new Error('failed to store dirNode'))
        return done(err)
      }
 
-      const multihash = res[0]
-      const size = res[1]
-
-      sizeIndex[mh.toB58String(multihash)] = size
-      ipldResolver.put({
-        node: node,
-        cid: new CID(multihash)
-      }, (err) => {
-        if (err) {
-          source.push(new Error('failed to store dirNode'))
-        } else if (path) {
-          source.push({
-            path: path,
-            multihash: multihash,
-            size: size
-          })
-        }
-
-        done(null, multihash)
-      })
+      if (path) {
+        source.push({
+          path: path,
+          multihash: node.multihash,
+          size: node.size
+        })
+      }
+
+      done(null, node.multihash)
    })
  })
 }
```
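The waterfall above is the create-then-put sequence this commit introduces at every call site: make the node, then store it under a CID derived from its own multihash. Pulled out on its own, it looks roughly like this (a sketch; the ipldResolver instance and the sizeIndex bookkeeping are assumed from flush-tree.js):

```js
const waterfall = require('async/waterfall')
const CID = require('cids')
const mh = require('multihashes')
const UnixFS = require('ipfs-unixfs')
const dagPB = require('ipld-dag-pb')

function storeDirNode (links, sizeIndex, ipldResolver, done) {
  const dir = new UnixFS('directory')

  waterfall([
    // 1. create the immutable node; multihash and size are computed here
    (cb) => dagPB.DAGNode.create(dir.marshal(), links, cb),
    // 2. record its size, then store it under its own multihash
    (node, cb) => {
      sizeIndex[mh.toB58String(node.multihash)] = node.size
      ipldResolver.put({
        node: node,
        cid: new CID(node.multihash)
      }, (err) => cb(err, node))
    }
  ], done)
}
```

Passing the node through each step (`cb(err, node)`) is what lets the final callback push the path/multihash/size record without recomputing anything.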

src/importer/index.js (+47 -76)

```diff
@@ -8,6 +8,7 @@ const pullWrite = require('pull-write')
 const parallel = require('async/parallel')
 const dagPB = require('ipld-dag-pb')
 const CID = require('cids')
+const waterfall = require('async/waterfall')
 
 const fsc = require('./../chunker/fixed-size')
 const createAndStoreTree = require('./flush-tree')
@@ -71,49 +72,38 @@ function makeWriter (source, files, ipldResolver) {
   }
 }
 
-function createAndStoreDir (item, ipldResolver, cb) {
+function createAndStoreDir (item, ipldResolver, callback) {
   // 1. create the empty dir dag node
   // 2. write it to the dag store
 
   const d = new UnixFS('directory')
-  const n = new DAGNode()
-  n.data = d.marshal()
-
-  n.multihash((err, multihash) => {
+  waterfall([
+    (cb) => DAGNode.create(d.marshal(), cb),
+    (node, cb) => {
+      ipldResolver.put({
+        node: node,
+        cid: new CID(node.multihash)
+      }, (err) => cb(err, node))
+    }
+  ], (err, node) => {
     if (err) {
-      return cb(err)
+      return callback(err)
     }
-
-    ipldResolver.put({
-      node: n,
-      cid: new CID(multihash)
-    }, (err) => {
-      if (err) {
-        return cb(err)
-      }
-
-      n.size((err, size) => {
-        if (err) {
-          return cb(err)
-        }
-
-        cb(null, {
-          path: item.path,
-          multihash: multihash,
-          size: size
-        })
-      })
+    callback(null, {
+      path: item.path,
+      multihash: node.multihash,
+      size: node.size
     })
   })
 }
 
-function createAndStoreFile (file, ipldResolver, cb) {
+function createAndStoreFile (file, ipldResolver, callback) {
   if (Buffer.isBuffer(file.content)) {
     file.content = pull.values([file.content])
   }
 
   if (typeof file.content !== 'function') {
-    return cb(new Error('invalid content'))
+    return callback(new Error('invalid content'))
   }
 
   // 1. create the unixfs merkledag node
@@ -128,88 +118,69 @@ function createAndStoreFile (file, ipldResolver, cb) {
     file.content,
     fsc(CHUNK_SIZE),
     pull.asyncMap((chunk, cb) => {
-      const l = new UnixFS('file', Buffer(chunk))
-      const n = new DAGNode(l.marshal())
+      const l = new UnixFS('file', new Buffer(chunk))
 
-      n.multihash((err, multihash) => {
+      DAGNode.create(l.marshal(), (err, node) => {
         if (err) {
           return cb(err)
         }
 
         ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
+          node: node,
+          cid: new CID(node.multihash)
         }, (err) => {
           if (err) {
-            return cb(new Error('Failed to store chunk'))
+            return cb(err)
           }
 
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              Hash: multihash,
-              Size: size,
-              leafSize: l.fileSize(),
-              Name: ''
-            })
+          cb(null, {
+            Hash: node.multihash,
+            Size: node.size,
+            leafSize: l.fileSize(),
+            Name: ''
          })
        })
      })
    }),
    pull.collect((err, leaves) => {
      if (err) {
-        return cb(err)
+        return callback(err)
      }
 
      if (leaves.length === 1) {
-        return cb(null, {
+        return callback(null, {
          path: file.path,
          multihash: leaves[0].Hash,
          size: leaves[0].Size
        })
      }
 
      // create a parent node and add all the leafs
-
      const f = new UnixFS('file')
-      const n = new DAGNode()
 
-      for (let leaf of leaves) {
+      const links = leaves.map((leaf) => {
        f.addBlockSize(leaf.leafSize)
-        n.addRawLink(
-          new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
-        )
-      }
 
-      n.data = f.marshal()
+        return new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
+      })
 
-      n.multihash((err, multihash) => {
+      waterfall([
+        (cb) => DAGNode.create(f.marshal(), links, cb),
+        (node, cb) => {
+          ipldResolver.put({
+            node: node,
+            cid: new CID(node.multihash)
+          }, (err) => cb(err, node))
+        }
+      ], (err, node) => {
        if (err) {
-          return cb(err)
+          return callback(err)
        }
 
-        ipldResolver.put({
-          node: n,
-          cid: new CID(multihash)
-        }, (err) => {
-          if (err) {
-            return cb(err)
-          }
-
-          n.size((err, size) => {
-            if (err) {
-              return cb(err)
-            }
-
-            cb(null, {
-              path: file.path,
-              multihash: multihash,
-              size: size
-            })
-          })
+        callback(null, {
+          path: file.path,
+          multihash: node.multihash,
+          size: node.size
        })
      })
    })
```
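The most involved part of the importer change is the parent-node assembly at the end of createAndStoreFile: the old loop that mutated a node with addRawLink becomes a map that collects DAGLinks for DAGNode.create. Condensed into a helper (the helper name is ours; the shape of the leaf records is taken from the diff above):

```js
const dagPB = require('ipld-dag-pb')
const UnixFS = require('ipfs-unixfs')

const DAGNode = dagPB.DAGNode
const DAGLink = dagPB.DAGLink

// `leaves` is an array of { Name, Size, Hash, leafSize } records, one per
// stored chunk, as collected by the chunking pipeline.
function createParentNode (leaves, callback) {
  const f = new UnixFS('file')

  const links = leaves.map((leaf) => {
    // Track each chunk's payload size in the UnixFS metadata...
    f.addBlockSize(leaf.leafSize)
    // ...and link to the stored chunk by name, size and multihash.
    return new DAGLink(leaf.Name, leaf.Size, leaf.Hash)
  })

  // Links are fixed at creation time; there is no addRawLink() any more.
  DAGNode.create(f.marshal(), links, callback)
}
```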

test/test-importer.js (+2 -2)

```diff
@@ -19,8 +19,8 @@ function stringifyMh (files) {
 const bigFile = loadFixture(__dirname, 'fixtures/1.2MiB.txt')
 const smallFile = loadFixture(__dirname, 'fixtures/200Bytes.txt')
 
-module.exports = function (repo) {
-  describe('importer', function () {
+module.exports = (repo) => {
+  describe('importer', () => {
     let ipldResolver
 
     before(() => {
```
