diff --git a/README.md b/README.md index 79badbfc..2fdb912d 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ IPFS unixFS Engine =================== -> Import data into an IPFS DAG Service. +> Import & Export data to/from an [IPFS DAG Service][] [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) @@ -74,24 +74,24 @@ When run, the stat of DAG Node is outputted for each file on data event until th ``` { multihash: , - Size: 39243, + size: 39243, path: '/tmp/foo/bar' } { multihash: , - Size: 59843, + size: 59843, path: '/tmp/foo/quxx' } { multihash: , - Size: 93242, + size: 93242, path: '/tmp/foo' } { multihash: , - Size: 94234, + size: 94234, path: '/tmp' } ``` -## API +## Importer API ```js const Importer = require('ipfs-unixfs-engine').importer @@ -99,16 +99,22 @@ const Importer = require('ipfs-unixfs-engine').importer ### const add = new Importer(dag) -The importer is a duplex stream in object mode that writes inputs of tuples -of path and readable streams of data. You can stream an array of files to the -importer, just call the 'end' function to signal that you are done inputting file/s. -Listen to the 'data' for the returned informtion 'multihash, size and path' for -each file added. Listen to the 'end' event from the stream to know when the -importer has finished importing files. Input file paths with directory structure -will preserve the hierarchy in the dag node. +The importer is an object Transform stream that accepts objects of the form + +```js +{ + path: 'a name', + content: (Buffer or Readable stream) +} +``` + +The stream will output IPFS DAG Node stats for the nodes it has added to the DAG +Service. When stats on a node are emitted they are guaranteed to have been +written into the DAG Service's storage mechanism. 
+ +The input's file paths and directory structure will be preserved in the DAG +Nodes. -Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance -`dagService`. ## Example Exporter @@ -133,15 +139,24 @@ exportEvent.on('data', (result) => { } ``` -##API +## Exporter API ```js -const Importer = require('ipfs-unixfs-engine').exporter +const Exporter = require('ipfs-unixfs-engine').exporter ``` -The exporter is a readable stream in object mode that returns an object ```{ content: stream, path: 'path' }``` by the multihash of the file from the dag service. +The exporter is a readable stream in object mode that outputs objects of the +form +```js +{ + path: 'a name', + content: (Buffer or Readable stream) +} +``` -## install +by the multihash of the file from the DAG Service. + +## Install With [npm](https://npmjs.org/) installed, run @@ -149,6 +164,9 @@ With [npm](https://npmjs.org/) installed, run $ npm install ipfs-unixfs-engine ``` -## license +## License ISC + + +[IPFS DAG Service]: https://github.com/vijayee/js-ipfs-merkle-dag/ diff --git a/package.json b/package.json index 834bca3f..1f6445b9 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "ipfs-unixfs-engine", "version": "0.8.0", "description": "JavaScript implementation of the unixfs Engine used by IPFS", - "main": "lib/index.js", + "main": "src/index.js", "jsnext:main": "src/index.js", "scripts": { "lint": "aegir-lint", @@ -70,4 +70,4 @@ "greenkeeperio-bot ", "nginnever " ] -} \ No newline at end of file +} diff --git a/src/importer.js b/src/importer.js index 5eaa01e0..aede8925 100644 --- a/src/importer.js +++ b/src/importer.js @@ -238,13 +238,15 @@ function Importer (dagService, options) { // If the value is not an object // add as a link to the dirNode - function traverse (tree, base) { + let pendingWrites = 0 + + function traverse (tree, path, done) { const keys = Object.keys(tree) let tmpTree = tree keys.map((key) => { if 
(typeof tmpTree[key] === 'object' && !Buffer.isBuffer(tmpTree[key])) { - tmpTree[key] = traverse.call(this, tmpTree[key], base ? base + '/' + key : key) + tmpTree[key] = traverse.call(this, tmpTree[key], path ? path + '/' + key : key, done) } }) @@ -264,28 +266,39 @@ function Importer (dagService, options) { }) n.data = d.marshal() + + pendingWrites++ dagService.add(n, (err) => { + pendingWrites-- if (err) { this.push({error: 'failed to store dirNode'}) + } else if (path) { + const el = { + path: path, + multihash: n.multihash(), + yes: 'no', + size: n.size() + } + this.push(el) + } + + if (pendingWrites <= 0) { + done() } }) - if (!base) { + if (!path) { return } - const el = { - path: base, - multihash: n.multihash(), - size: n.size() - } - this.push(el) - mhIndex[bs58.encode(n.multihash())] = { size: n.size() } return n.multihash() } - /* const rootHash = */ traverse.call(this, fileTree) - this.push(null) + + let self = this + /* const rootHash = */ traverse.call(this, fileTree, null, function () { + self.push(null) + }) } } }