This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Add onlyHash option #183

Merged

merged 7 commits on Sep 7, 2017
1 change: 1 addition & 0 deletions README.md
@@ -139,6 +139,7 @@ The input's file paths and directory structure will be preserved in the [`dag-pb`
- `hamt` (object): the options for the HAMT sharded directory builder
- bits (positive integer, defaults to `8`): the number of bits at each bucket of the HAMT
- `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
- `onlyHash` (boolean, defaults to false): Only chunk and hash - do not write to disk

### Exporter

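For context on the `onlyHash` entry documented in the README change above, here is a minimal usage sketch (not part of this diff). It assumes `repo` is an already-initialized IPFS repo instance and that the package's top-level `importer` export is used; it mirrors the setup of the tests added in this PR.

// Sketch only: `repo` and the require paths are assumptions for illustration.
const pull = require('pull-stream')
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')
const importer = require('ipfs-unixfs-engine').importer

const ipldResolver = new IPLDResolver(new BlockService(repo))

pull(
  pull.values([{ path: 'hello.txt', content: Buffer.from('hello world') }]),
  // onlyHash: chunk and hash the input, but skip the ipldResolver.put calls,
  // so no blocks are persisted to the repo
  importer(ipldResolver, { onlyHash: true }),
  pull.collect((err, files) => {
    if (err) return console.error(err)
    // files[0].multihash is still computed even though nothing was written
    console.log(files[0].path, files[0].multihash)
  })
)

As the importer test added below demonstrates, a subsequent `ipldResolver.get` for the resulting multihash is expected to fail, since the blocks were never stored.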
1 change: 1 addition & 0 deletions package.json
@@ -81,6 +81,7 @@
},
"contributors": [
"Alan Shaw <[email protected]>",
"Bernard Mordan <[email protected]>",
"David Dias <[email protected]>",
"Francisco Baio Dias <[email protected]>",
"Friedel Ziegelmayer <[email protected]>",
5 changes: 5 additions & 0 deletions src/builder/builder.js
@@ -57,9 +57,12 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
// 2. write it to the dag store

const d = new UnixFS('directory')

waterfall([
(cb) => DAGNode.create(d.marshal(), [], options.hashAlg, cb),
(node, cb) => {
if (options.onlyHash) return cb(null, node)

ipldResolver.put(node, {
cid: new CID(node.multihash)
}, (err) => cb(err, node))
@@ -106,6 +109,8 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
})
}),
pull.asyncMap((leaf, callback) => {
if (options.onlyHash) return callback(null, leaf)

ipldResolver.put(leaf.DAGNode, {
cid: new CID(leaf.DAGNode.multihash)
}, (err) => callback(err, leaf)
2 changes: 2 additions & 0 deletions src/builder/reduce.js
@@ -34,6 +34,8 @@ module.exports = function (file, ipldResolver, options) {
waterfall([
(cb) => DAGNode.create(f.marshal(), links, cb),
(node, cb) => {
if (options.onlyHash) return cb(null, node)

ipldResolver.put(node, {
cid: new CID(node.multihash)
}, (err) => cb(err, node))
12 changes: 7 additions & 5 deletions src/importer/dir-flat.js
@@ -10,10 +10,9 @@ const DAGNode = dagPB.DAGNode
const Dir = require('./dir')

class DirFlat extends Dir {
constructor (props) {
super()
constructor (props, _options) {
super(props, _options)
this._children = {}
Object.assign(this, props)
}

put (name, value, callback) {
@@ -57,10 +56,13 @@ class DirFlat extends Dir {
})

const dir = new UnixFS('directory')

waterfall(
[
(callback) => DAGNode.create(dir.marshal(), links, callback),
(node, callback) => {
if (this._options.onlyHash) return callback(null, node)

ipldResolver.put(
node,
{
@@ -86,6 +88,6 @@

module.exports = createDirFlat

function createDirFlat (props) {
return new DirFlat(props)
function createDirFlat (props, _options) {
return new DirFlat(props, _options)
}
10 changes: 5 additions & 5 deletions src/importer/dir-sharded.js
@@ -41,11 +41,9 @@ const defaultOptions = {

class DirSharded extends Dir {
constructor (props, _options) {
super()
const options = Object.assign({}, defaultOptions, _options)
this._options = options
super(props, options)
this._bucket = Bucket(options)
Object.assign(this, props)
}

put (name, value, callback) {
@@ -87,8 +85,8 @@

module.exports = createDirSharded

function createDirSharded (props) {
return new DirSharded(props)
function createDirSharded (props, _options) {
return new DirSharded(props, _options)
}

function flush (options, bucket, path, ipldResolver, source, callback) {
@@ -148,6 +146,8 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
[
(callback) => DAGNode.create(dir.marshal(), links, callback),
(node, callback) => {
if (options.onlyHash) return callback(null, node)

ipldResolver.put(
node,
{
4 changes: 4 additions & 0 deletions src/importer/dir.js
@@ -1,4 +1,8 @@
'use strict'

module.exports = class Dir {
constructor (props, _options) {
this._options = _options || {}
Object.assign(this, props)
}
}
14 changes: 7 additions & 7 deletions src/importer/flat-to-shard.js
@@ -5,8 +5,8 @@ const DirSharded = require('./dir-sharded')

module.exports = flatToShard

function flatToShard (child, dir, threshold, callback) {
maybeFlatToShardOne(dir, threshold, (err, newDir) => {
function flatToShard (child, dir, threshold, options, callback) {
maybeFlatToShardOne(dir, threshold, options, (err, newDir) => {
if (err) {
callback(err)
return // early
@@ -27,7 +27,7 @@ function flatToShard (child, dir, threshold, callback) {
},
(callback) => {
if (parent) {
flatToShard(newDir, parent, threshold, callback)
flatToShard(newDir, parent, threshold, options, callback)
} else {
callback(null, newDir)
}
@@ -40,15 +40,15 @@
})
}

function maybeFlatToShardOne (dir, threshold, callback) {
function maybeFlatToShardOne (dir, threshold, options, callback) {
if (dir.flat && dir.directChildrenCount() >= threshold) {
definitelyShardOne(dir, callback)
definitelyShardOne(dir, options, callback)
} else {
callback(null, dir)
}
}

function definitelyShardOne (oldDir, callback) {
function definitelyShardOne (oldDir, options, callback) {
const newDir = DirSharded({
root: oldDir.root,
dir: true,
@@ -57,7 +57,7 @@ path: oldDir.path,
path: oldDir.path,
dirty: oldDir.dirty,
flat: false
})
}, options)

oldDir.eachChildSeries(
(key, value, callback) => {
2 changes: 1 addition & 1 deletion src/importer/index.js
@@ -64,7 +64,7 @@ module.exports = function (ipldResolver, _options) {
return node
}),
treeBuilderStream
)
)

return {
sink: entry.sink,
11 changes: 5 additions & 6 deletions src/importer/tree-builder.js
@@ -14,14 +14,14 @@ module.exports = createTreeBuilder

const defaultOptions = {
wrap: false,
shardSplitThreshold: 1000
shardSplitThreshold: 1000,
onlyHash: false
}

function createTreeBuilder (ipldResolver, _options) {
const options = Object.assign({}, defaultOptions, _options)

const queue = createQueue(consumeQueue, 1)

// returned stream
let stream = createStream()

@@ -32,7 +32,7 @@
dir: true,
dirty: false,
flat: true
})
}, options)

return {
flush: flushRoot,
@@ -101,7 +101,6 @@ function createTreeBuilder (ipldResolver, _options) {
currentPath += '/'
}
currentPath += pathElem

const last = (index === lastIndex)
parent.dirty = true
parent.multihash = null
@@ -110,7 +109,7 @@
if (last) {
waterfall([
(callback) => parent.put(pathElem, elem, callback),
(callback) => flatToShard(null, parent, options.shardSplitThreshold, callback),
(callback) => flatToShard(null, parent, options.shardSplitThreshold, options, callback),
(newRoot, callback) => {
tree = newRoot
callback()
@@ -131,7 +130,7 @@
path: currentPath,
dirty: true,
flat: true
})
}, options)
}
const parentDir = parent
parent = dir
1 change: 1 addition & 0 deletions test/browser.js
@@ -52,4 +52,5 @@ describe('IPFS data importing tests on the Browser', function () {
require('./test-hash-parity-with-go-ipfs')(repo)
require('./test-nested-dir-import-export')(repo)
require('./test-dirbuilder-sharding')(repo)
require('./test-builder-only-hash')(repo)
})
1 change: 1 addition & 0 deletions test/node.js
@@ -53,4 +53,5 @@ describe('IPFS UnixFS Engine', () => {
require('./test-nested-dir-import-export')(repo)
require('./test-dirbuilder-sharding')(repo)
require('./test-dag-api')
require('./test-builder-only-hash')(repo)
})
53 changes: 53 additions & 0 deletions test/test-builder-only-hash.js
@@ -0,0 +1,53 @@
/* eslint-env mocha */
'use strict'

const chai = require('chai')
chai.use(require('dirty-chai'))
const expect = chai.expect
const BlockService = require('ipfs-block-service')
const pull = require('pull-stream')
const IPLDResolver = require('ipld-resolver')
const CID = require('cids')
const createBuilder = require('../src/builder')
const FixedSizeChunker = require('../src/chunker/fixed-size')

module.exports = (repo) => {
describe('builder', () => {
let ipldResolver

before(() => {
const bs = new BlockService(repo)
ipldResolver = new IPLDResolver(bs)
})

it('will only chunk and hash if passed an "onlyHash" option', (done) => {
const onCollected = (err, nodes) => {
if (err) return done(err)

const node = nodes[0]
expect(node).to.exist()

ipldResolver.get(new CID(node.multihash), (err, res) => {
expect(err).to.exist()
done()
})
}

const content = String(Math.random() + Date.now())
const inputFile = {
path: content + '.txt',
content: Buffer.from(content)
}

const options = {
onlyHash: true
}

pull(
pull.values([inputFile]),
createBuilder(FixedSizeChunker, ipldResolver, options),
pull.collect(onCollected)
)
})
})
}
31 changes: 31 additions & 0 deletions test/test-importer.js
@@ -11,6 +11,7 @@ const sinon = require('sinon')
const BlockService = require('ipfs-block-service')
const pull = require('pull-stream')
const mh = require('multihashes')
const CID = require('cids')
const IPLDResolver = require('ipld-resolver')
const loadFixture = require('aegir/fixtures')

@@ -419,6 +420,36 @@ module.exports = (repo) => {
}
})

it('will not write to disk if passed "onlyHash" option', (done) => {
const content = String(Math.random() + Date.now())
const inputFile = {
path: content + '.txt',
content: Buffer.from(content)
}

const options = {
onlyHash: true
}

const onCollected = (err, files) => {
if (err) return done(err)

const file = files[0]
expect(file).to.exist()

ipldResolver.get(new CID(file.multihash), (err, res) => {
expect(err).to.exist()
done()
})
}

pull(
pull.values([inputFile]),
importer(ipldResolver, options),
pull.collect(onCollected)
)
})

it('will call an optional progress function', (done) => {
options.progress = sinon.spy()
