This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit b5acb24

Rename "stream" to "content" in tuples.

1 parent: 194e19a

File tree: 5 files changed, +21 −21 lines

README.md (+3 −3)

@@ -47,8 +47,8 @@ const res = []
 
 const rs = fs.createReadStream(file)
 const rs2 = fs.createReadStream(file2)
-const input = {path: /tmp/foo/bar, stream: rs}
-const input2 = {path: /tmp/foo/quxx, stream: rs2}
+const input = {path: /tmp/foo/bar, content: rs}
+const input2 = {path: /tmp/foo/quxx, content: rs2}
 
 // Listen for the data event from the importer stream
 
@@ -138,7 +138,7 @@ exportEvent.on('data', (result) => {
 const Importer = require('ipfs-unixfs-engine').exporter
 ```
 
-The exporter is a readable stream in object mode that returns an object ```{ stream: stream, path: 'path' }``` by the multihash of the file from the dag service.
+The exporter is a readable stream in object mode that returns an object ```{ content: stream, path: 'path' }``` by the multihash of the file from the dag service.
 
 
 ## install
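
For readers following the README change above: a minimal sketch of the updated import flow after this rename, assuming the `ipfs-unixfs-engine` module exposes an `importer` alongside the `exporter` shown in the diff, and that `ds` is a DAGService instance as in the tests further down. Anything not visible in this diff is an assumption, not code from the commit.

```js
// Sketch only (not part of this commit): file data now travels under `content`
// instead of `stream` in the tuples written to the importer.
const fs = require('fs')
const Importer = require('ipfs-unixfs-engine').importer // assumed export name

const i = new Importer(ds) // `ds` is a DAGService, as in test/test-importer.js

i.on('data', (added) => {
  // Emitted object shape is an assumption; the tests below read `multihash`
  // and `size` from these objects.
  console.log(added)
})
i.on('end', () => console.log('import finished'))

i.write({path: '/tmp/foo/bar', content: fs.createReadStream('/tmp/foo/bar')})
i.write({path: '/tmp/foo/quxx', content: fs.createReadStream('/tmp/foo/quxx')})
i.end()
```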

src/exporter.js (+3 −3)

@@ -42,7 +42,7 @@ function Exporter (hash, dagService, options) {
 rs.push(unmarshaledData.data)
 rs.push(null)
 }
-this.push({ stream: rs, path: name })
+this.push({ content: rs, path: name })
 callback()
 return
 } else {
@@ -75,7 +75,7 @@ function Exporter (hash, dagService, options) {
 return
 })
 }
-this.push({ stream: rs, path: name })
+this.push({ content: rs, path: name })
 callback()
 return
 }
@@ -97,7 +97,7 @@ function Exporter (hash, dagService, options) {
 rs.push(node.data)
 rs.push(null)
 }
-this.push({stream: null, path: name})
+this.push({content: null, path: name})
 callback()
 return
 } else {
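
The exporter side of the rename: every object pushed to consumers now carries the readable stream under `content`. A minimal consumer sketch, assuming the `exporter(hash, ds)` call shape used in test/test-exporter.js below; the directory handling follows the third hunk, where entries are pushed with `content: null`.

```js
// Sketch only (not part of this commit): consuming the exporter's object-mode output.
const bl = require('bl')
const exporter = require('ipfs-unixfs-engine').exporter

const out = exporter(hash, ds) // `hash` is a multihash, `ds` a DAGService (assumed wiring)

out.on('data', (file) => {
  if (file.content === null) {
    // Entries without file data (directories in the tests) carry `content: null`.
    console.log('no content for', file.path)
    return
  }
  // File entries carry a readable stream under `content` (previously `stream`).
  file.content.pipe(bl((err, data) => {
    if (err) throw err
    console.log(file.path, data.length, 'bytes')
  }))
})
```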

src/importer.js (+2 −2)

@@ -36,7 +36,7 @@ function Importer (dagService, options) {
 this._write = (fl, enc, next) => {
 this.read()
 counter++
-if (!fl.stream) {
+if (!fl.content) {
 // 1. create the empty dir dag node
 // 2. write it to the dag store
 // 3. add to the files array {path: <>, hash: <>}
@@ -64,7 +64,7 @@ function Importer (dagService, options) {
 }
 
 const leaves = []
-fl.stream
+fl.content
 .pipe(fsc(CHUNK_SIZE))
 .pipe(through2((chunk, enc, cb) => {
 // 1. create the unixfs merkledag node
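
Both branches of the importer's `_write` are touched: a tuple without `content` takes the empty-directory path, while a tuple with a readable `content` stream is piped through the fixed-size chunker. A short sketch of that write-side contract, reusing the importer instance `i` and read stream `r` from the tests below (placeholder names, not from this hunk):

```js
// Sketch only: the field `_write` branches on is now `content`.
i.write({path: 'pim'})                           // no `content` -> empty dir dag node
i.write({path: 'pim/200Bytes.txt', content: r})  // `content` stream -> chunked into leaves
i.end()
```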

test/test-exporter.js (+4 −4)

@@ -33,7 +33,7 @@ module.exports = function (repo) {
 expect(err).to.not.exist
 const testExport = exporter(hash, ds)
 testExport.on('data', (file) => {
-file.stream.pipe(bl((err, bldata) => {
+file.content.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata).to.deep.equal(unmarsh.data)
 done()
@@ -48,7 +48,7 @@ module.exports = function (repo) {
 const ds = new DAGService(bs)
 const testExport = exporter(hash, ds)
 testExport.on('data', (file) => {
-file.stream.pipe(bl((err, bldata) => {
+file.content.pipe(bl((err, bldata) => {
 expect(bldata).to.deep.equal(bigFile)
 expect(err).to.not.exist
 done()
@@ -63,7 +63,7 @@ module.exports = function (repo) {
 const testExport = exporter(hash, ds)
 testExport.on('data', (file) => {
 expect(file.path).to.equal('QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE')
-file.stream.pipe(bl((err, bldata) => {
+file.content.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 done()
 }))
@@ -94,7 +94,7 @@ module.exports = function (repo) {
 const ds = new DAGService(bs)
 const testExport = exporter(hash, ds)
 testExport.on('data', (dir) => {
-expect(dir.stream).to.equal(null)
+expect(dir.content).to.equal(null)
 done()
 })
 })

test/test-importer.js (+9 −9)

@@ -38,7 +38,7 @@ module.exports = function (repo) {
 expect(bs58.encode(obj.multihash)).to.equal('QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8')
 expect(obj.size).to.equal(211)
 })
-i.write({path: '200Bytes.txt', stream: r})
+i.write({path: '200Bytes.txt', content: r})
 i.end()
 i.on('end', () => {
 done()
@@ -69,7 +69,7 @@ module.exports = function (repo) {
 i.on('end', () => {
 done()
 })
-i.write({path: 'foo/bar/200Bytes.txt', stream: r})
+i.write({path: 'foo/bar/200Bytes.txt', content: r})
 i.end()
 })
 
@@ -85,7 +85,7 @@ module.exports = function (repo) {
 i.on('end', () => {
 done()
 })
-i.write({path: '1.2MiB.txt', stream: r})
+i.write({path: '1.2MiB.txt', content: r})
 i.end()
 })
 
@@ -106,7 +106,7 @@ module.exports = function (repo) {
 i.on('end', () => {
 done()
 })
-i.write({path: 'foo-big/1.2MiB.txt', stream: r})
+i.write({path: 'foo-big/1.2MiB.txt', content: r})
 i.end()
 })
 
@@ -156,8 +156,8 @@ module.exports = function (repo) {
 i.on('end', () => {
 done()
 })
-i.write({path: 'pim/200Bytes.txt', stream: r1})
-i.write({path: 'pim/1.2MiB.txt', stream: r2})
+i.write({path: 'pim/200Bytes.txt', content: r1})
+i.write({path: 'pim/1.2MiB.txt', content: r2})
 i.end()
 })
 
@@ -195,9 +195,9 @@ module.exports = function (repo) {
 i.on('end', () => {
 done()
 })
-i.write({path: 'pam/pum/200Bytes.txt', stream: r1})
-i.write({path: 'pam/pum/1.2MiB.txt', stream: r2})
-i.write({path: 'pam/1.2MiB.txt', stream: r3})
+i.write({path: 'pam/pum/200Bytes.txt', content: r1})
+i.write({path: 'pam/pum/1.2MiB.txt', content: r2})
+i.write({path: 'pam/1.2MiB.txt', content: r3})
 i.end()
 })
 })
