diff --git a/package.json b/package.json index ac57b0fe4..90e68d2bb 100644 --- a/package.json +++ b/package.json @@ -5,13 +5,15 @@ "main": "lib/index.js", "jsnext:main": "src/index.js", "dependencies": { - "bl": "^1.1.2", + "async": "^2.0.0-rc.5", "babel-runtime": "^6.6.1", + "bl": "^1.1.2", "bs58": "^3.0.0", "detect-node": "^2.0.3", "flatmap": "0.0.3", "glob": "^7.0.3", "ipfs-merkle-dag": "^0.6.0", + "isstream": "^0.1.2", "multiaddr": "^2.0.0", "multipart-stream": "^2.0.1", "ndjson": "^1.4.3", diff --git a/src/add-to-dagnode-transform.js b/src/add-to-dagnode-transform.js new file mode 100644 index 000000000..61d38b4bd --- /dev/null +++ b/src/add-to-dagnode-transform.js @@ -0,0 +1,20 @@ +'use strict' + +const async = require('async') +const getDagNode = require('./get-dagnode') + +// transform { Hash: '...' } objects into { path: 'string', node: DAGNode } +module.exports = function (err, res, send, done) { + if (err) return done(err) + async.map(res, function map (entry, next) { + getDagNode(send, entry.Hash, function (err, node) { + var obj = { + path: entry.Name, + node: node + } + next(err, obj) + }) + }, function (err, res) { + done(err, res) + }) +} diff --git a/src/api/add-files.js b/src/api/add-files.js new file mode 100644 index 000000000..c5362f643 --- /dev/null +++ b/src/api/add-files.js @@ -0,0 +1,20 @@ +'use strict' + +const addToDagNodesTransform = require('../add-to-dagnode-transform') + +module.exports = (send) => { + return function add (path, opts, cb) { + if (typeof (opts) === 'function' && cb === undefined) { + cb = opts + opts = {} + } + + if (typeof (path) !== 'string') { + return cb(new Error('"path" must be a string')) + } + + var sendWithTransform = send.withTransform(addToDagNodesTransform) + + return sendWithTransform('add', null, opts, path, cb) + } +} diff --git a/src/api/add-url.js b/src/api/add-url.js new file mode 100644 index 000000000..fccb96e75 --- /dev/null +++ b/src/api/add-url.js @@ -0,0 +1,25 @@ +'use strict' + +const Wreck = 
require('wreck') +const addToDagNodesTransform = require('../add-to-dagnode-transform') + +module.exports = (send) => { + return function add (url, opts, cb) { + if (typeof (opts) === 'function' && cb === undefined) { + cb = opts + opts = {} + } + + if (typeof url !== 'string' || !url.startsWith('http')) { + return cb(new Error('"url" param must be an http(s) url')) + } + + var sendWithTransform = send.withTransform(addToDagNodesTransform) + + Wreck.request('GET', url, null, (err, res) => { + if (err) return cb(err) + + sendWithTransform('add', null, opts, res, cb) + }) + } +} diff --git a/src/api/add.js b/src/api/add.js index 4b0d4e6b8..0b465ef7b 100644 --- a/src/api/add.js +++ b/src/api/add.js @@ -1,6 +1,7 @@ 'use strict' -const Wreck = require('wreck') +const isStream = require('isstream') +const addToDagNodesTransform = require('../add-to-dagnode-transform') module.exports = (send) => { return function add (files, opts, cb) { @@ -9,14 +10,16 @@ module.exports = (send) => { opts = {} } - if (typeof files === 'string' && files.startsWith('http')) { - return Wreck.request('GET', files, null, (err, res) => { - if (err) return cb(err) + var good = Buffer.isBuffer(files) || + isStream.isReadable(files) || + Array.isArray(files) - send('add', null, opts, res, cb) - }) + if (!good) { + return cb(new Error('"files" must be a buffer, readable stream, or array of objects')) } - return send('add', null, opts, files, cb) + var sendWithTransform = send.withTransform(addToDagNodesTransform) + + return sendWithTransform('add', null, opts, files, cb) } } diff --git a/src/get-dagnode.js b/src/get-dagnode.js new file mode 100644 index 000000000..25dd3aaf2 --- /dev/null +++ b/src/get-dagnode.js @@ -0,0 +1,36 @@ +'use strict' + +const DAGNode = require('ipfs-merkle-dag').DAGNode +const bl = require('bl') +const async = require('async') + +module.exports = function (send, hash, cb) { + + // Retrieve the object and its data in parallel, then produce a DAGNode + // instance using this 
information. + async.parallel([ + function get (done) { + send('object/get', hash, null, null, done) + }, + + function data (done) { + // WORKAROUND: request the object's data separately, since raw bits in JSON + // are interpreted as UTF-8 and corrupt the data. + // See https://github.com/ipfs/go-ipfs/issues/1582 for more details. + send('object/data', hash, null, null, done) + }], + + function done (err, res) { + if (err) return cb(err) + + var object = res[0] + var stream = res[1] + + stream.pipe(bl(function (err, data) { + if (err) return cb(err) + + cb(err, new DAGNode(data, object.Links)) + })) + }) + +} diff --git a/src/load-commands.js b/src/load-commands.js index fc5ee614b..3c451018c 100644 --- a/src/load-commands.js +++ b/src/load-commands.js @@ -1,8 +1,7 @@ 'use strict' function requireCommands () { - return { - add: require('./api/add'), + var cmds = { bitswap: require('./api/bitswap'), block: require('./api/block'), cat: require('./api/cat'), @@ -11,7 +10,6 @@ function requireCommands () { dht: require('./api/dht'), diag: require('./api/diag'), id: require('./api/id'), - files: require('./api/files'), log: require('./api/log'), ls: require('./api/ls'), mount: require('./api/mount'), @@ -24,6 +22,24 @@ function requireCommands () { update: require('./api/update'), version: require('./api/version') } + + // TODO: crowding the 'files' namespace temporarily for interface-ipfs-core + // compatibility, until 'files vs mfs' naming decision is resolved. 
+ cmds.files = function (send) { + var files = require('./api/files')(send) + files.add = require('./api/add')(send) + return files + } + + cmds.util = function (send) { + var util = { + addFiles: require('./api/add-files')(send), + addUrl: require('./api/add-url')(send) + } + return util + } + + return cmds } function loadCommands (send) { diff --git a/src/request-api.js b/src/request-api.js index 7b2d73c53..753305bc0 100644 --- a/src/request-api.js +++ b/src/request-api.js @@ -110,7 +110,7 @@ function requestAPI (config, path, args, qs, files, buffer, cb) { // -- Interface exports = module.exports = function getRequestAPI (config) { - return function (path, args, qs, files, buffer, cb) { + var send = function (path, args, qs, files, buffer, cb) { if (typeof buffer === 'function') { cb = buffer buffer = false @@ -127,4 +127,41 @@ exports = module.exports = function getRequestAPI (config) { return requestAPI(config, path, args, qs, files, buffer, cb) } + + // Wraps the 'send' function such that an asynchronous transform may be + // applied to its result before passing it on to either its callback or + // promise. 
+ send.withTransform = function (transform) { + return function (path, args, qs, files, buffer, cb) { + if (typeof buffer === 'function') { + cb = buffer + buffer = false + } + + var p = send(path, args, qs, files, buffer, wrap(cb)) + + if (p instanceof Promise) { + return p.then((res) => { + return new Promise(function (resolve, reject) { + transform(null, res, send, function (err, res) { + if (err) reject(err) + else resolve(res) + }) + }) + }) + } else { + return p + } + + function wrap (done) { + if (done) { + return function (err, res) { + transform(err, res, send, done) + } + } + } + } + } + + return send } diff --git a/test/api/add.spec.js b/test/api/add.spec.js index 6c09509ce..67c190235 100644 --- a/test/api/add.spec.js +++ b/test/api/add.spec.js @@ -2,189 +2,15 @@ /* globals apiClients */ 'use strict' -const expect = require('chai').expect -const Readable = require('stream').Readable -const path = require('path') -const isNode = require('detect-node') -const fs = require('fs') - -let testfile -let testfileBig -const testfilePath = path.join(__dirname, '/../testfile.txt') - -if (isNode) { - testfile = fs.readFileSync(testfilePath) - testfileBig = fs.createReadStream(path.join(__dirname, '/../15mb.random'), { bufferSize: 128 }) - // testfileBig = fs.createReadStream(path.join(__dirname, '/../100mb.random'), { bufferSize: 128 }) -} else { - testfile = require('raw!../testfile.txt') - // browser goes nuts with a 100mb in memory - // testfileBig = require('raw!../100mb.random') +const test = require('interface-ipfs-core') + +const common = { + setup: function (cb) { + cb(null, apiClients.a) + }, + teardown: function (cb) { + cb() + } } -describe('.add', () => { - it('add file', (done) => { - if (!isNode) { - return done() - } - - const file = { - path: 'testfile.txt', - content: new Buffer(testfile) - } - - apiClients.a.add([file], (err, res) => { - expect(err).to.not.exist - - const added = res[0] != null ? 
res[0] : res - expect(added).to.have.property('Hash', 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - expect(added).to.have.property('Name', 'testfile.txt') - done() - }) - }) - - it('add buffer', (done) => { - let buf = new Buffer(testfile) - apiClients.a.add(buf, (err, res) => { - expect(err).to.not.exist - - expect(res).to.have.length(1) - expect(res[0]).to.have.property('Hash', 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - done() - }) - }) - - it('add BIG buffer', (done) => { - if (!isNode) { - return done() - } - - apiClients.a.add(testfileBig, (err, res) => { - expect(err).to.not.exist - - expect(res).to.have.length(1) - expect(res[0]).to.have.a.property('Hash', 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq') - done() - }) - }) - - it('add path', (done) => { - if (!isNode) { - return done() - } - - apiClients.a.add(testfilePath, (err, res) => { - expect(err).to.not.exist - - const added = res[0] != null ? res[0] : res - expect(added).to.have.property('Hash', 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - done() - }) - }) - - it('add a nested dir following symlinks', (done) => { - apiClients.a.add(path.join(__dirname, '/../test-folder'), { recursive: true }, (err, res) => { - if (isNode) { - expect(err).to.not.exist - - const added = res[res.length - 1] - expect(added).to.have.property('Hash', 'QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6') - - // check that the symlink was replaced by the target file - const linkPath = 'test-folder/hello-link' - const filePath = 'test-folder/files/hello.txt' - const linkHash = res.filter((e) => e.Name === linkPath)[0].Hash - const fileHash = res.filter((e) => e.Name === filePath)[0].Hash - expect(linkHash).to.equal(fileHash) - - done() - } else { - expect(err.message).to.be.equal('Recursive uploads are not supported in the browser') - done() - } - }) - }) - - it('add a nested dir without following symlinks', (done) => { - apiClients.a.add(path.join(__dirname, '/../test-folder'), { recursive: true, 
followSymlinks: false }, (err, res) => { - if (isNode) { - expect(err).to.not.exist - - const added = res[res.length - 1] - // same hash as the result from the cli (ipfs add test/test-folder -r) - expect(added).to.have.property('Hash', 'QmRArDYd8Rk7Zb7K2699KqmQM1uUoejn1chtEAcqkvjzGg') - done() - } else { - expect(err.message).to.be.equal('Recursive uploads are not supported in the browser') - done() - } - }) - }) - - it('add a nested dir as array', (done) => { - if (!isNode) return done() - const fs = require('fs') - const base = path.join(__dirname, '../test-folder') - const content = (name) => ({ - path: `test-folder/${name}`, - content: fs.readFileSync(path.join(base, name)) - }) - const dirs = [ - content('add.js'), - content('cat.js'), - content('ls.js'), - content('ipfs-add.js'), - content('version.js'), - content('files/hello.txt'), - content('files/ipfs.txt'), - { - path: 'test-folder', - dir: true - } - ] - - apiClients.a.add(dirs, { recursive: true }, (err, res) => { - expect(err).to.not.exist - - const added = res[res.length - 1] - expect(added).to.have.property('Hash', 'QmTDH2RXGn8XyDAo9YyfbZAUXwL1FCr44YJCN9HBZmL9Gj') - done() - }) - }) - - it('add stream', (done) => { - const stream = new Readable() - stream.push('Hello world') - stream.push(null) - - apiClients.a.add(stream, (err, res) => { - expect(err).to.not.exist - - const added = res[0] != null ? res[0] : res - expect(added).to.have.a.property('Hash', 'QmNRCQWfgze6AbBCaT1rkrkV5tJ2aP4oTNPb5JZcXYywve') - done() - }) - }) - - it('add url', (done) => { - const url = 'https://raw.githubusercontent.com/ipfs/js-ipfs-api/2a9cc63d7427353f2145af6b1a768a69e67c0588/README.md' - apiClients.a.add(url, (err, res) => { - expect(err).to.not.exist - - const added = res[0] != null ? 
res[0] : res - expect(added).to.have.a.property('Hash', 'QmZmHgEX9baxUn3qMjsEXQzG6DyNcrVnwieQQTrpDdrFvt') - done() - }) - }) - - describe('promise', () => { - it('add buffer', () => { - let buf = new Buffer(testfile) - return apiClients.a.add(buf) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.have.property('Hash', 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP') - }) - }) - }) -}) +test.files(common) diff --git a/test/api/ls.spec.js b/test/api/ls.spec.js index 97692dbca..2dbcd075a 100644 --- a/test/api/ls.spec.js +++ b/test/api/ls.spec.js @@ -30,7 +30,7 @@ describe('ls', function () { it('should correctly handle a nonexisting path', function (done) { if (!isNode) return done() - apiClients.a.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6/folder_that_isnt_there', (err, res) => { + apiClients.a.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => { expect(err).to.exist expect(res).to.not.exist done() @@ -59,7 +59,7 @@ describe('ls', function () { it('should correctly handle a nonexisting path', () => { if (!isNode) return - return apiClients.a.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6/folder_that_isnt_there') + return apiClients.a.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there') .catch((err) => { expect(err).to.exist })