From af4eb8562d6754a21c5dcf62190614bb04a058ff Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Fri, 14 May 2021 16:22:38 +0100
Subject: [PATCH 01/25] chore: upgrade repo to new multiformats module

Replaces `cids`, `multibase`, `multihashes` etc with new `multiformats`
module.

BREAKING CHANGE: The blockstore now takes instances of the new CID class

---
 package.json                  |  13 +-
 src/blockstore-utils.js       |  17 ++-
 src/blockstore.js             |  85 +++++++-----
 src/idstore.js                |  51 ++++----
 src/index.js                  |  10 +-
 src/types.d.ts                |  59 +--------
 test/blockstore-test.js       | 234 ++++++++++++++++++----------------
 test/blockstore-utils-test.js |   9 +-
 test/interop-test.js          |  18 +--
 test/stat-test.js             |  10 +-
 10 files changed, 251 insertions(+), 255 deletions(-)

diff --git a/package.json b/package.json
index d086ac76..9e561cc9 100644
--- a/package.json
+++ b/package.json
@@ -22,7 +22,7 @@
     "test:browser": "aegir test -t browser",
     "test:webworker": "aegir test -t webworker",
     "build": "aegir build",
-    "lint": "aegir lint",
+    "lint": "aegir ts -p check && aegir lint",
     "release": "aegir release",
     "release-minor": "aegir release --type minor",
     "release-major": "aegir release --type major",
@@ -45,6 +45,8 @@
     "npm": ">=6.0.0"
   },
   "devDependencies": {
+    "@ipld/dag-cbor": "^5.0.5",
+    "@ipld/dag-pb": "^1.1.4",
    "@types/bytes": "^3.1.0",
     "@types/debug": "^4.1.5",
     "@types/memdown": "^3.0.0",
@@ -59,7 +61,6 @@
     "it-first": "^1.0.2",
     "just-range": "^2.1.0",
     "memdown": "^6.0.0",
-    "multihashing-async": "^2.1.0",
     "ncp": "^2.0.0",
     "process": "^0.11.10",
     "readable-stream": "^3.6.0",
@@ -70,23 +71,21 @@
   },
   "dependencies": {
     "bytes": "^3.1.0",
-    "cids": "^1.1.6",
     "datastore-core": "^4.0.0",
     "datastore-fs": "^4.0.0",
     "datastore-level": "^5.0.0",
     "debug": "^4.1.0",
     "err-code": "^3.0.1",
-    "interface-datastore": "^4.0.0",
+    "interface-blockstore": "^0.0.3",
+    "interface-datastore": "ipfs/interface-datastore#feat/make-datastore-generic",
     "ipfs-repo-migrations": "^8.0.0",
     "ipfs-utils": "^7.0.0",
-    "ipld-block": "^0.11.0",
     "it-filter": "^1.0.2",
     "it-pushable": "^1.4.0",
     "just-safe-get": "^2.0.0",
     "just-safe-set": "^2.1.0",
     "merge-options": "^3.0.4",
-    "multibase": "^4.0.1",
-    "multihashes": "^4.0.2",
+    "multiformats": "^8.0.5",
     "p-queue": "^6.0.0",
     "proper-lockfile": "^4.0.0",
     "sort-keys": "^4.0.0",
diff --git a/src/blockstore-utils.js b/src/blockstore-utils.js
index 22b9ba2f..13611e22 100644
--- a/src/blockstore-utils.js
+++ b/src/blockstore-utils.js
@@ -1,10 +1,11 @@
 'use strict'
 
 const { Key } = require('interface-datastore')
-const CID = require('cids')
-const multibase = require('multibase')
+const { CID } = require('multiformats')
+const mhd = require('multiformats/hashes/digest')
+const raw = require('multiformats/codecs/raw')
 const errcode = require('err-code')
-const uint8ArrayToString = require('uint8arrays/to-string')
+const { base32 } = require('multiformats/bases/base32')
 
 /**
  * Transform a cid to the appropriate datastore key.
@@ -13,11 +14,13 @@ const uint8ArrayToString = require('uint8arrays/to-string')
  * @returns {Key}
  */
 exports.cidToKey = cid => {
-  if (!CID.isCID(cid)) {
+  if (!(cid instanceof CID)) {
     throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID')
   }
 
-  return new Key('/' + uint8ArrayToString(multibase.encode('base32', cid.multihash)).slice(1).toUpperCase(), false)
+  const encoded = base32.encode(cid.multihash.bytes)
+
+  return new Key('/' + encoded.slice(1).toUpperCase(), false)
 }
 
 /**
@@ -30,5 +33,7 @@ exports.cidToKey = cid => {
  */
 exports.keyToCid = key => {
   // Block key is of the form /<base32 encoded multihash>
-  return new CID(1, 'raw', multibase.decode('b' + key.toString().slice(1).toLowerCase()))
+  const digest = mhd.decode(base32.decode('b' + key.toString().slice(1).toLowerCase()))
+
+  return CID.createV1(raw.code, digest)
 }
diff --git a/src/blockstore.js b/src/blockstore.js
index 0ea1619a..7062943c 100644
--- a/src/blockstore.js
+++ b/src/blockstore.js
@@ -1,16 +1,15 @@
 'use strict'
 
 const { shard, ShardingDatastore } = require('datastore-core')
-const Block = require('ipld-block')
 const { cidToKey, keyToCid } = require('./blockstore-utils')
 const drain = require('it-drain')
 const pushable = require('it-pushable')
+
 /**
- * @typedef {import('interface-datastore').Query} Query
+ * @typedef {import('multiformats').CID} CID
  * @typedef {import('interface-datastore').Datastore} Datastore
- * @typedef {import('interface-datastore').Options} DatastoreOptions
- * @typedef {import('cids')} CID
- * @typedef {import('./types').Blockstore} Blockstore
+ * @typedef {import('interface-store').Options} DatastoreOptions
+ * @typedef {import('interface-blockstore').Blockstore} Blockstore
 */
 
 /**
@@ -39,28 +38,56 @@ function maybeWithSharding (filestore, options) {
  * @returns {Blockstore}
 */
 function createBaseStore (store) {
-  return {
+  /** @type {Blockstore} */
+  const bs = {
     open () {
       return store.open()
     },
 
+    close () {
+      return store.close()
+    },
+
     async * query (query, options) {
-      for await (const { key, value } of store.query(query, options)) {
-        yield new Block(value, keyToCid(key))
+      /** @type {import('interface-datastore').Query} */
+      const storeQuery = {
+        prefix: query.prefix,
+        limit: query.limit,
+        offset: query.offset,
+        filters: (query.filters || []).map(filter => {
+          return ({ key, value }) => filter({ key: keyToCid(key), value })
+        }),
+        orders: (query.orders || []).map(order => {
+          return (a, b) => order({ key: keyToCid(a.key), value: a.value }, { key: keyToCid(b.key), value: b.value })
+        })
+      }
+
+      for await (const { key, value } of store.query(storeQuery, options)) {
+        yield { key: keyToCid(key), value }
       }
     },
 
     async * queryKeys (query, options) {
-      for await (const key of store.queryKeys(query, options)) {
+      /** @type {import('interface-datastore').KeyQuery} */
+      const storeQuery = {
+        prefix: query.prefix,
+        limit: query.limit,
+        offset: query.offset,
+        filters: (query.filters || []).map(filter => {
+          return (key) => filter(keyToCid(key))
+        }),
+        orders: (query.orders || []).map(order => {
+          return (a, b) => order(keyToCid(a), keyToCid(b))
+        })
+      }
+
+      for await (const key of store.queryKeys(storeQuery, options)) {
         yield keyToCid(key)
       }
     },
 
     async get (cid, options) {
-      const key = cidToKey(cid)
-      const blockData = await store.get(key, options)
-
-      return new Block(blockData, cid)
+      return store.get(cidToKey(cid), options)
     },
 
     async * getMany (cids, options) {
@@ -69,22 +96,16 @@
       }
     },
 
-    async put (block, options) {
-      if (!Block.isBlock(block)) {
-        throw new Error('invalid block')
-      }
-
-      const key = cidToKey(block.cid)
+    async put (cid, buf, options) {
+      const key = cidToKey(cid)
       const exists = await store.has(key, options)
 
       if (!exists) {
-        await store.put(key, block.data, options)
+        await store.put(key, buf, options)
       }
-
-      return block
     },
 
-    async * putMany (blocks, options) { // eslint-disable-line require-await
+    async * putMany (pairs, options) { // eslint-disable-line require-await
       // we cannot simply chain to `store.putMany` because we convert a CID into
       // a key based on the multihash only, so we lose the version & codec and
       // cannot give the user back the CID they used to create the block, so yield
@@ -102,17 +123,17 @@
       runner(async () => {
         try {
           await drain(store.putMany(async function * () {
-            for await (const block of blocks) {
-              const key = cidToKey(block.cid)
+            for await (const { key: cid, value } of pairs) {
+              const key = cidToKey(cid)
               const exists = await store.has(key, options)
 
               if (!exists) {
-                yield { key, value: block.data }
+                yield { key, value }
               }
 
               // there is an assumption here that after the yield has completed
               // the underlying datastore has finished writing the block
-              output.push(block)
+              output.push({ key: cid, value })
             }
           }()))
 
@@ -151,8 +172,14 @@
       return out
     },
 
-    close () {
-      return store.close()
+    batch () {
+      return {
+        put (key, value) { },
+        delete (key) { },
+        commit: async (options) => { }
+      }
     }
   }
+
+  return bs
 }
diff --git a/src/idstore.js b/src/idstore.js
index 11dfeae6..2272ce24 100644
--- a/src/idstore.js
+++ b/src/idstore.js
@@ -1,18 +1,17 @@
 'use strict'
 
-const Block = require('ipld-block')
 const filter = require('it-filter')
-const mh = require('multihashes')
 const pushable = require('it-pushable')
 const drain = require('it-drain')
-const CID = require('cids')
+const { CID } = require('multiformats')
 const errcode = require('err-code')
+const { identity } = require('multiformats/hashes/identity')
 
 /**
  * @typedef {import('interface-datastore').Query} Query
  * @typedef {import('interface-datastore').Datastore} Datastore
  * @typedef {import('interface-datastore').Options} DatastoreOptions
- * @typedef {import('./types').Blockstore} Blockstore
+ * @typedef {import('interface-blockstore').Blockstore} Blockstore
 */
 
 /**
@@ -31,6 +30,10 @@ function createIdStore (store) {
       return store.open()
     },
 
+    close () {
+      return store.close()
+    },
+
     query (query, options) {
       return store.query(query, options)
     },
@@ -42,7 +45,7 @@ function createIdStore (store) {
     async get (cid, options) {
       const extracted = extractContents(cid)
       if (extracted.isIdentity) {
-        return Promise.resolve(new Block(extracted.digest, cid))
+        return Promise.resolve(extracted.digest)
       }
       return store.get(cid, options)
     },
@@ -53,15 +56,17 @@ function createIdStore (store) {
       }
     },
 
-    async put (block, options) {
-      const { isIdentity } = extractContents(block.cid)
+    async put (cid, buf, options) {
+      const { isIdentity } = extractContents(cid)
+
       if (isIdentity) {
-        return Promise.resolve(block)
+        return
       }
-      return store.put(block, options)
+
+      await store.put(cid, buf, options)
     },
 
-    async * putMany (blocks, options) {
+    async * putMany (pairs, options) {
       // in order to return all blocks. we're going to assemble a separate iterable
       // return rather than return the resolves of store.putMany using the same
       // process used by blockstore.putMany
@@ -74,12 +79,13 @@ function createIdStore (store) {
     runner(async () => {
        try {
          await drain(store.putMany(async function * () {
-          for await (const block of blocks) {
-            if (!extractContents(block.cid).isIdentity) {
-              yield block
+          for await (const { key, value } of pairs) {
+            if (!extractContents(key).isIdentity) {
+              yield { key, value }
             }
+
            // if non identity blocks successfully write, blocks are included in output
-            output.push(block)
+            output.push({ key, value })
           }
         }()))
 
@@ -112,8 +118,12 @@ function createIdStore (store) {
       return store.deleteMany(filter(cids, (cid) => !extractContents(cid).isIdentity), options)
     },
 
-    close () {
-      return store.close()
+    batch () {
+      return {
+        put (key, value) { },
+        delete (key) { },
+        commit: async (options) => { }
+      }
     }
   }
 }
@@ -123,14 +133,11 @@ function createIdStore (store) {
  * @returns {{ isIdentity: false } | { isIdentity: true, digest: Uint8Array}}
 */
 function extractContents (k) {
-  if (!CID.isCID(k)) {
+  if (!(k instanceof CID)) {
     throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID')
   }
 
-  // Pre-check by calling Prefix(), this much faster than extracting the hash.
-  const decoded = mh.decode(k.multihash)
-
-  if (decoded.name !== 'identity') {
+  if (k.multihash.code !== identity.code) {
     return {
       isIdentity: false
     }
@@ -138,6 +145,6 @@ function extractContents (k) {
 
   return {
     isIdentity: true,
-    digest: decoded.digest
+    digest: k.multihash.digest
   }
 }
diff --git a/src/index.js b/src/index.js
index b98b2033..00163a83 100644
--- a/src/index.js
+++ b/src/index.js
@@ -35,10 +35,9 @@ const lockers = {
  * @typedef {import('./types').Lock} Lock
  * @typedef {import('./types').LockCloser} LockCloser
  * @typedef {import('./types').Stat} Stat
- * @typedef {import('./types').Blockstore} Blockstore
  * @typedef {import('./types').Config} Config
- * @typedef {import('ipld-block')} Block
  * @typedef {import('interface-datastore').Datastore} Datastore
+ * @typedef {import('interface-blockstore').Blockstore} Blockstore
 */
 
 /**
@@ -401,11 +400,10 @@ class IpfsRepo {
     let size = BigInt(0)
 
     if (this.blocks) {
-      for await (const blockOrCid of this.blocks.query({})) {
-        const block = /** @type {Block} */(blockOrCid)
+      for await (const { key, value } of this.blocks.query({})) {
         count += BigInt(1)
-        size += BigInt(block.data.byteLength)
-        size += BigInt(block.cid.bytes.byteLength)
+        size += BigInt(value.byteLength)
+        size += BigInt(key.bytes.byteLength)
       }
     }
 
diff --git a/src/types.d.ts b/src/types.d.ts
index 48f95d48..cc7a42d1 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -1,7 +1,6 @@
-import type { Datastore, Options as DatastoreOptions, Query, KeyQuery, Key } from 'interface-datastore'
-import type CID from 'cids'
-import type Block from 'ipld-block'
+import type { Datastore } from 'interface-datastore'
+import type { CID } from 'multiformats'
 
 export type AwaitIterable<T> = Iterable<T> | AsyncIterable<T>
 export type Await<T> = Promise<T> | T
@@ -59,60 +58,6 @@ export interface Stat {
   repoSize: BigInt
 }
 
-export interface Blockstore {
-  open: () => Promise<void>
-
-  /**
-   * Query the store
-   */
-  query: (query: Query, options?: DatastoreOptions) => AsyncIterable<Block>
-
-  /**
-   * Query the store, returning only keys
-   */
-  queryKeys: (query: KeyQuery, options?: DatastoreOptions) => AsyncIterable<CID>
-
-  /**
-   * Get a single block by CID
-   */
-  get: (cid: CID, options?: DatastoreOptions) => Promise<Block>
-
-  /**
-   * Like get, but for more
-   */
-  getMany: (cids: AwaitIterable<CID>, options?: DatastoreOptions) => AsyncIterable<Block>
-
-  /**
-   * Write a single block to the store
-   */
-  put: (block: Block, options?: DatastoreOptions) => Promise<Block>
-
-  /**
-   * Like put, but for more
-   */
-  putMany: (blocks: AwaitIterable<Block>, options?: DatastoreOptions) => AsyncIterable<Block>
-
-  /**
-   * Does the store contain block with this CID?
-   */
-  has: (cid: CID, options?: DatastoreOptions) => Promise<boolean>
-
-  /**
-   * Delete a block from the store
-   */
-  delete: (cid: CID, options?: DatastoreOptions) => Promise<void>
-
-  /**
-   * Delete a block from the store
-   */
-  deleteMany: (cids: AwaitIterable<CID>, options?: DatastoreOptions) => AsyncIterable<CID>
-
-  /**
-   * Close the store
-   */
-  close: () => Promise<void>
-}
-
 export interface Config {
   Addresses?: AddressConfig
   API?: APIConfig,
diff --git a/test/blockstore-test.js b/test/blockstore-test.js
index 557434f9..8a9e2b7d 100644
--- a/test/blockstore-test.js
+++ b/test/blockstore-test.js
@@ -3,10 +3,8 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
-const Block = require('ipld-block')
-const CID = require('cids')
+const { CID } = require('multiformats')
 const range = require('just-range')
-const multihashing = require('multihashing-async')
 const tempDir = require('ipfs-utils/src/temp-dir')
 const { cidToKey } = require('../src/blockstore-utils')
 const IPFSRepo = require('../src')
@@ -15,17 +13,26 @@ const all = require('it-all')
 const first = require('it-first')
 const uint8ArrayFromString = require('uint8arrays/from-string')
 const uint8ArrayToString = require('uint8arrays/to-string')
+const uint8ArrayEquals = require('uint8arrays/equals')
 const { Adapter } = require('interface-datastore')
-
-async function makeBlock () {
-  const bData = uint8ArrayFromString(`hello-${Math.random()}`)
-
-  const hash = await multihashing(bData, 'sha2-256')
-  return new Block(bData, new CID(hash))
+const { sha256 } = require('multiformats/hashes/sha2')
+const { identity } = require('multiformats/hashes/identity')
+const raw = require('multiformats/codecs/raw')
+const dagCbor = require('@ipld/dag-cbor')
+const dagPb = require('@ipld/dag-pb')
+const mc = require('multicodec')
+
+async function makePair () {
+  const data = new TextEncoder().encode(`hello-${Math.random()}`)
+  const digest = await sha256.digest(data)
+  const cid = CID.create(1, raw.code, digest)
+
+  return { key: cid, value: data }
 }
 
 /**
  * @typedef {import('interface-datastore').Key} Key
+ * @typedef {import('interface-blockstore').Pair} Pair
 */
 
 /**
@@ -38,15 +45,16 @@ module.exports = (repo) => {
     const bData = uint8ArrayFromString('hello world')
     const identityData = uint8ArrayFromString('A16461736466190144', 'base16upper')
-    /** @type {Block} */
-    let b
+    /** @type { { key: CID, value: Uint8Array } } */
+    let pair
     /** @type {CID} */
     let identityCID
+
     before(async () => {
-      const hash = await multihashing(bData, 'sha2-256')
-      b = new Block(bData, new CID(hash))
-      const identityHash = await multihashing(identityData, 'identity')
-      identityCID = new CID(1, 'dag-cbor', identityHash)
+      const digest1 = await sha256.digest(bData)
+      pair = { key: CID.createV0(digest1), value: bData }
+      const digest2 = await identity.digest(identityData)
+      identityCID = CID.createV1(mc.IDENTITY, digest2)
     })
 
     describe('.put', () => {
@@ -60,34 +68,31 @@ module.exports = (repo) => {
       })
 
       it('simple', async () => {
-        await repo.blocks.put(b)
+        await repo.blocks.put(pair.key, pair.value)
       })
 
       it('does not write an identity block', async () => {
-        const identityBlock = new Block(identityData, identityCID)
-        await repo.blocks.put(identityBlock)
+        await repo.blocks.put(identityCID, identityData)
         const cids = await all(repo.blocks.queryKeys({}))
-        const rawCID = new CID(1, 'raw', identityCID.multihash)
+        const rawCID = CID.createV1(raw.code, identityCID.multihash)
         expect(cids).to.not.deep.include(rawCID)
       })
 
       it('multi write (locks)', async () => {
-        await Promise.all([repo.blocks.put(b), repo.blocks.put(b)])
+        await Promise.all([repo.blocks.put(pair.key, pair.value), repo.blocks.put(pair.key, pair.value)])
       })
 
       it('empty value', async () => {
         const d = new Uint8Array(0)
-        const multihash = await multihashing(d, 'sha2-256')
-        const empty = new Block(d, new CID(multihash))
-        await repo.blocks.put(empty)
+        const digest = await sha256.digest(d)
+        await repo.blocks.put(CID.createV0(digest), d)
       })
 
       it('massive multiwrite', async function () {
         this.timeout(15000) // add time for ci
-        const hashes = await Promise.all(range(100).map((i) => multihashing(blockData[i], 'sha2-256')))
+        const hashes = await Promise.all(range(100).map((i) => sha256.digest(blockData[i])))
         await Promise.all(range(100).map((i) => {
-          const block = new Block(blockData[i], new CID(hashes[i]))
-          return repo.blocks.put(block)
+          return repo.blocks.put(CID.createV0(hashes[i]), blockData[i])
         }))
       })
 
@@ -95,28 +100,33 @@ module.exports = (repo) => {
         this.timeout(15000) // add time for ci
         const blocks = await Promise.all(range(50).map(async (i) => {
           const d = uint8ArrayFromString('many' + Math.random())
-          const hash = await multihashing(d, 'sha2-256')
-          return new Block(d, new CID(hash))
+          const digest = await sha256.digest(d)
+          return { key: CID.createV0(digest), value: d }
         }))
 
         const put = await all(repo.blocks.putMany(blocks))
         expect(put).to.deep.equal(blocks)
 
         for (const block of blocks) {
-          const block1 = await repo.blocks.get(block.cid)
-          expect(block1).to.be.eql(block)
+          const block1 = await repo.blocks.get(block.key)
+          expect(block1).to.equalBytes(block.value)
         }
       })
 
       it('.putMany with identity block included', async function () {
         const d = uint8ArrayFromString('many' + Math.random())
-        const hash = await multihashing(d, 'sha2-256')
-        const blocks = [new Block(d, new CID(1, 'raw', hash)), new Block(identityData, identityCID)]
+        const digest = await sha256.digest(d)
+
+        const blocks = [{
+          key: CID.createV1(raw.code, digest), value: d
+        }, {
+          key: identityCID, value: identityData
+        }]
 
         const put = await all(repo.blocks.putMany(blocks))
         expect(put).to.deep.equal(blocks)
 
         const cids = await all(repo.blocks.queryKeys({}))
-        expect(cids).to.deep.include(new CID(1, 'raw', hash))
-        expect(cids).to.not.deep.include(new CID(1, 'raw', identityCID.multihash))
+        expect(cids).to.deep.include(CID.createV1(raw.code, digest))
+        expect(cids).to.not.deep.include(CID.createV1(raw.code, identityCID.multihash))
       })
 
       it('returns an error on invalid block', () => {
@@ -136,17 +146,19 @@ module.exports = (repo) => {
       })
 
       it('simple', async () => {
-        const block = await repo.blocks.get(b.cid)
-        expect(block).to.be.eql(b)
+        const block = await repo.blocks.get(pair.key)
+        expect(block).to.equalBytes(pair.value)
       })
 
       it('massive read', async function () {
         this.timeout(15000) // add time for ci
         await Promise.all(range(20 * 100).map(async (i) => {
           const j = i % blockData.length
-          const hash = await multihashing(blockData[j], 'sha2-256')
-          const block = await repo.blocks.get(new CID(hash))
-          expect(block.data).to.be.eql(blockData[j])
+          const digest = await sha256.digest(blockData[j])
+          const cid = CID.createV0(digest)
+          const block = await repo.blocks.get(cid)
+
+          expect(block).to.equalBytes(blockData[j])
         }))
       })
 
@@ -157,21 +169,21 @@ module.exports = (repo) => {
 
       it('should get block stored under v0 CID with a v1 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(hash)
-        await repo.blocks.put(new Block(data, cid))
+        const digest = await sha256.digest(data)
+        const cid = CID.createV0(digest)
+        await repo.blocks.put(cid, data)
         const block = await repo.blocks.get(cid.toV1())
-        expect(block.data).to.eql(data)
+        expect(block).to.equalBytes(data)
       })
 
       it('should get block stored under v1 CID with a v0 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-pb', hash)
-        await repo.blocks.put(new Block(data, cid))
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagPb.code, digest)
+        await repo.blocks.put(cid, data)
         const block = await repo.blocks.get(cid.toV0())
-        expect(block.data).to.eql(data)
+        expect(block).to.equalBytes(data)
       })
 
       it('throws when passed an invalid cid', () => {
@@ -181,8 +193,8 @@ module.exports = (repo) => {
 
      it('throws ERR_NOT_FOUND when requesting non-dag-pb CID that is not in the store', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-cbor', hash)
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagCbor.code, digest)
 
         await expect(repo.blocks.get(cid)).to.eventually.be.rejected().with.property('code', 'ERR_NOT_FOUND')
       })
@@ -190,8 +202,8 @@
       it('throws unknown error encountered when getting a block', async () => {
         const err = new Error('wat')
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(hash)
+        const digest = await sha256.digest(data)
+        const cid = CID.createV0(digest)
         const key = cidToKey(cid)
 
         otherRepo = new IPFSRepo(tempDir(), {
@@ -233,7 +245,7 @@
 
       it('can load an identity hash without storing first', async () => {
         const block = await repo.blocks.get(identityCID)
-        expect(block.data).to.be.eql(identityData)
+        expect(block).to.equalBytes(identityData)
       })
     })
 
@@ -248,14 +260,14 @@
       })
 
       it('simple', async () => {
-        const blocks = await all(repo.blocks.getMany([b.cid]))
-        expect(blocks).to.deep.include(b)
+        const blocks = await all(repo.blocks.getMany([pair.key]))
+        expect(blocks).to.deep.include(pair.value)
       })
 
-      it('including a block with identity has', async () => {
-        const blocks = await all(repo.blocks.getMany([b.cid, identityCID]))
-        expect(blocks).to.deep.include(b)
-        expect(blocks).to.deep.include(new Block(identityData, identityCID))
+      it('including a block with identity hash', async () => {
+        const blocks = await all(repo.blocks.getMany([pair.key, identityCID]))
+        expect(blocks).to.deep.include(pair.value)
+        expect(blocks).to.deep.include(identityData)
       })
 
       it('massive read', async function () {
@@ -265,9 +277,9 @@
         const blocks = await all(repo.blocks.getMany(async function * () {
           for (let i = 0; i < num; i++) {
             const j = i % blockData.length
-            const hash = await multihashing(blockData[j], 'sha2-256')
+            const digest = await sha256.digest(blockData[j])
 
-            yield new CID(hash)
+            yield CID.createV0(digest)
           }
         }()))
 
@@ -275,7 +287,7 @@
         for (let i = 0; i < num; i++) {
           const j = i % blockData.length
-          expect(blocks[i]).to.have.deep.property('data', blockData[j])
+          expect(blocks[i]).to.equalBytes(blockData[j])
         }
       })
 
@@ -286,21 +298,21 @@
 
      it('should get block stored under v0 CID with a v1 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(hash)
-        await repo.blocks.put(new Block(data, cid))
+        const digest = await sha256.digest(data)
+        const cid = CID.createV0(digest)
+        await repo.blocks.put(cid, data)
         const block = await first(repo.blocks.getMany([cid.toV1()]))
-        expect(block && block.data).to.eql(data)
+        expect(block).to.equalBytes(data)
       })
 
      it('should get block stored under v1 CID with a v0 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-pb', hash)
-        await repo.blocks.put(new Block(data, cid))
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagPb.code, digest)
+        await repo.blocks.put(cid, data)
         const block = await first(repo.blocks.getMany([cid.toV0()]))
-        expect(block && block.data).to.eql(data)
+        expect(block).to.equalBytes(data)
       })
 
       it('throws when passed an invalid cid', () => {
@@ -310,8 +322,8 @@
 
      it('throws ERR_NOT_FOUND when requesting non-dag-pb CID that is not in the store', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-cbor', hash)
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagCbor.code, digest)
 
         await expect(drain(repo.blocks.getMany([cid]))).to.eventually.be.rejected().with.property('code', 'ERR_NOT_FOUND')
       })
@@ -319,8 +331,8 @@
       it('throws unknown error encountered when getting a block', async () => {
         const err = new Error('wat')
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(hash)
+        const digest = await sha256.digest(data)
+        const cid = CID.createV0(digest)
         const key = cidToKey(cid)
 
         otherRepo = new IPFSRepo(tempDir(), {
@@ -371,7 +383,7 @@
 
     describe('.has', () => {
       it('existing block', async () => {
-        const exists = await repo.blocks.has(b.cid)
+        const exists = await repo.blocks.has(pair.key)
         expect(exists).to.eql(true)
       })
 
@@ -381,15 +393,15 @@
       })
 
       it('non existent block', async () => {
-        const exists = await repo.blocks.has(new CID('QmbcpFjzamCj5ZZdduW32ctWUPvbGMwQZk2ghWK6PrKswE'))
+        const exists = await repo.blocks.has(CID.parse('QmbcpFjzamCj5ZZdduW32ctWUPvbGMwQZk2ghWK6PrKswE'))
         expect(exists).to.eql(false)
       })
 
      it('should have block stored under v0 CID with a v1 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(hash)
-        await repo.blocks.put(new Block(data, cid))
+        const digest = await sha256.digest(data)
+        const cid = CID.createV0(digest)
+        await repo.blocks.put(cid, data)
         const exists = await repo.blocks.has(cid.toV1())
         expect(exists).to.eql(true)
       })
@@ -397,10 +409,10 @@
 
      it('should have block stored under v1 CID with a v0 CID', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-pb', hash)
-        await repo.blocks.put(new Block(data, cid))
-        const exists = await repo.blocks.has(cid.toV0())
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagPb.code, digest)
+        await repo.blocks.put(cid, data)
+        const exists = await repo.blocks.has(cid.toV0())
         expect(exists).to.eql(true)
       })
 
       it('throws when passed an invalid cid', () => {
@@ -411,8 +423,8 @@
 
      it('returns false when requesting non-dag-pb CID that is not in the store', async () => {
         const data = uint8ArrayFromString(`TEST${Date.now()}`)
-        const hash = await multihashing(data, 'sha2-256')
-        const cid = new CID(1, 'dag-cbor', hash)
+        const digest = await sha256.digest(data)
+        const cid = CID.createV1(dagCbor.code, digest)
 
         const result = await repo.blocks.has(cid)
 
         expect(result).to.be.false()
       })
     })
 
@@ -421,8 +433,8 @@
     describe('.delete', () => {
       it('simple', async () => {
-        await repo.blocks.delete(b.cid)
-        const exists = await repo.blocks.has(b.cid)
+        await repo.blocks.delete(pair.key)
+        const exists = await repo.blocks.has(pair.key)
         expect(exists).to.equal(false)
       })
 
       it('throws when passed an invalid cid', () => {
@@ -440,58 +452,62 @@
 
     describe('.deleteMany', () => {
       it('simple', async () => {
-        const deleted = await all(repo.blocks.deleteMany([b.cid]))
-        const exists = await repo.blocks.has(b.cid)
+        const deleted = await all(repo.blocks.deleteMany([pair.key]))
+        const exists = await repo.blocks.has(pair.key)
         expect(exists).to.equal(false)
 
         expect(deleted).to.have.lengthOf(1)
-        expect(deleted[0]).to.deep.equal(b.cid)
+        expect(deleted[0]).to.deep.equal(pair.key)
       })
 
       it('including identity cid', async () => {
-        await drain(repo.blocks.deleteMany([b.cid, identityCID]))
-        const exists = await repo.blocks.has(b.cid)
+        await drain(repo.blocks.deleteMany([pair.key, identityCID]))
+        const exists = await repo.blocks.has(pair.key)
         expect(exists).to.equal(false)
         const identityExists = await repo.blocks.has(identityCID)
         expect(identityExists).to.equal(true)
       })
 
       it('throws when passed an invalid cid', () => {
+        // @ts-expect-error invalid key type
         return expect(drain(repo.blocks.deleteMany(['foo']))).to.eventually.be.rejected().with.property('code', 'ERR_INVALID_CID')
       })
     })
 
     describe('.query', () => {
-      /** @type {Block} */
-      let block1
-      /** @type {Block} */
-      let block2
+      /** @type {Pair} */
+      let pair1
+      /** @type {Pair} */
+      let pair2
 
       before(async () => {
-        block1 = await makeBlock()
-        block2 = await makeBlock()
+        pair1 = await makePair()
+        pair2 = await makePair()
 
-        await repo.blocks.put(block1)
-        await repo.blocks.put(block2)
+        await repo.blocks.put(pair1.key, pair1.value)
+        await repo.blocks.put(pair2.key, pair2.value)
       })
 
       it('returns key/values for block data', async () => {
-        const blocks = await all(repo.blocks.query({}))
-        const block = blocks.find(block => uint8ArrayToString(block.data, 'base64') === uint8ArrayToString(block1.data, 'base64'))
+        const blocks = await all(repo.blocks.query({
+          filters: [
+            ({ value }) => uint8ArrayEquals(value, pair1.value)
+          ]
+        }))
 
-        expect(block).to.be.ok()
-        expect(block && block.cid.multihash).to.deep.equal(block1.cid.multihash)
-        expect(block && block.data).to.deep.equal(block1.data)
+        expect(blocks).to.have.lengthOf(1)
+        expect(blocks[0]).to.have.nested.property('key.bytes').that.equalBytes(pair1.key.bytes)
+        expect(blocks[0]).to.have.property('value').that.equalBytes(pair1.value)
       })
 
       it('returns some of the blocks', async () => {
         const blocksWithPrefix = await all(repo.blocks.query({
-          prefix: cidToKey(block1.cid).toString().substring(0, 10)
+          prefix: cidToKey(pair1.key).toString().substring(0, 10)
         }))
-        const block = blocksWithPrefix.find(block =>
uint8ArrayToString(block.data, 'base64') === uint8ArrayToString(block1.data, 'base64'))
+        const block = blocksWithPrefix.find(({ key, value }) => uint8ArrayToString(value, 'base64') === uint8ArrayToString(pair1.value, 'base64'))
 
         expect(block).to.be.ok()
-        expect(block && block.cid.multihash).to.deep.equal(block1.cid.multihash)
-        expect(block && block.data).to.deep.equal(block1.data)
+        expect(block?.key.bytes).to.equalBytes(pair1.key.bytes)
+        expect(block?.value).to.equalBytes(pair1.value)
 
         const allBlocks = await all(repo.blocks.query({}))
         expect(blocksWithPrefix.length).to.be.lessThan(allBlocks.length)
       })
 
@@ -503,7 +519,7 @@
         expect(cids.length).to.be.greaterThan(0)
 
         for (const cid of cids) {
-          expect(CID.isCID(cid)).to.be.true()
+          expect(CID.asCID(cid)).to.be.ok()
         }
       })
     })
diff --git a/test/blockstore-utils-test.js b/test/blockstore-utils-test.js
index f774e608..7390ff4b 100644
--- a/test/blockstore-utils-test.js
+++ b/test/blockstore-utils-test.js
@@ -3,14 +3,15 @@
 
 const { expect } = require('aegir/utils/chai')
 const { Key } = require('interface-datastore')
-const CID = require('cids')
+const { CID } = require('multiformats')
 const Repo = require('../src')
+const raw = require('multiformats/codecs/raw')
 
 module.exports = () => {
   describe('blockstore utils', () => {
     it('converts a CID to a datastore Key and back', () => {
       // CIDv1 in base32 with IPLD raw codec
-      const originalCid = new CID('bafkreihkb3vrxxex5zvzkr3s3a6noe223r7jka4ofjy2nkzu27kueg76ii')
+      const originalCid = CID.parse('bafkreihkb3vrxxex5zvzkr3s3a6noe223r7jka4ofjy2nkzu27kueg76ii')
       const key = Repo.utils.blockstore.cidToKey(originalCid)
       expect(key instanceof Key).to.be.true()
       const cid = Repo.utils.blockstore.keyToCid(key)
@@ -20,14 +21,14 @@ module.exports = () => {
 
     it('converts a CID to base32 encoded key', () => {
       // CIDv0 in base58btc with implicit dag-pb codec
-      const originalCid = new CID('QmQPeNsJPyVWPFDVHb77w8G42Fvo15z4bG2X8D2GhfbSXc')
+      const originalCid = CID.parse('QmQPeNsJPyVWPFDVHb77w8G42Fvo15z4bG2X8D2GhfbSXc')
       const key = Repo.utils.blockstore.cidToKey(originalCid)
       expect(key instanceof Key).to.be.true()
       expect(key.toString()).to.equal('/CIQB4655YD5GLBB7WWEUAHCO6QONU5ICBONAA5JEPBIOEIVZ5RXTIYY')
       const cid = Repo.utils.blockstore.keyToCid(key)
       expect(cid instanceof CID).to.be.true()
       expect('bafkreia6po64b6tfqq73lckadrhpihg2oubaxgqaoushquhcek46y3zumm').to.equal(cid.toString())
-      expect(cid.codec).to.equal('raw')
+      expect(cid.code).to.equal(raw.code)
       expect(cid.version).to.equal(1)
       expect(cid.multihash).to.deep.equal(originalCid.multihash)
     })
diff --git a/test/interop-test.js b/test/interop-test.js
index c4324fd0..d74ca186 100644
--- a/test/interop-test.js
+++ b/test/interop-test.js
@@ -2,10 +2,10 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
-const mh = require('multihashing-async').multihash
-const CID = require('cids')
+const { CID } = require('multiformats')
 const Key = require('interface-datastore').Key
 const uint8ArrayToString = require('uint8arrays/to-string')
+const uint8ArrayFromString = require('uint8arrays/from-string')
 
 /**
  * @param {import('../src/index')} repo
 */
@@ -13,23 +13,23 @@ module.exports = (repo) => {
   describe('interop', () => {
     it('reads welcome-to-ipfs', async () => {
-      const welcomeHash = mh.fromHexString(
-        '1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec'
+      const welcomeHash = CID.decode(
+        uint8ArrayFromString('1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec', 'base16')
       )
 
-      const val = await repo.blocks.get(new CID(welcomeHash))
-      expect(uint8ArrayToString(val.data)).to.match(/Hello and Welcome to IPFS/)
+      const val = await repo.blocks.get(welcomeHash)
+      expect(uint8ArrayToString(val)).to.match(/Hello and Welcome to IPFS/)
     })
 
     it('reads a bunch of blocks', async () => {
       const cids = [
         'QmUxpzJGJYTK5AzH36jV9ucM2WdF5KhjANb4FAhqnREzuC',
         'QmQbb26h9dcU5iNPMNEzYZnZN9YLTXBtFwuHmmo6YU4Aig'
-      ].map((hash) => new CID(mh.fromB58String(hash)))
+      ].map((hash) => CID.parse(hash))
 
-      const values = await Promise.all(cids.map((cid) => repo.blocks?.get(cid)))
+      const values = await Promise.all(cids.map((cid) => repo.blocks.get(cid)))
       expect(values.length).to.equal(2)
-      expect(values.map((value) => value.data.length)).to.eql([2659, 12783])
+      expect(values.map((value) => value.length)).to.eql([2659, 12783])
     })
 
     it('reads DHT records from the datastore', async () => {
diff --git a/test/stat-test.js b/test/stat-test.js
index 7ebf0cd9..9fde8146 100644
--- a/test/stat-test.js
+++ b/test/stat-test.js
@@ -2,8 +2,7 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
-const Block = require('ipld-block')
-const CID = require('cids')
+const { CID } = require('multiformats')
 const uint8ArrayFromString = require('uint8arrays/from-string')
 
 /**
  * @param {import('../src/index')} repo
 */
@@ -11,11 +10,10 @@ module.exports = (repo) => {
   describe('stat', () => {
     before(async () => {
-      const data = new Block(
-        uint8ArrayFromString('foo'),
-        new CID('bafyreighz6vdlkdsvp4nu3lxhsofnk2eqxn6o57ag3mfxkqa7c327djhra')
+      await repo.blocks.put(
+        CID.parse('bafyreighz6vdlkdsvp4nu3lxhsofnk2eqxn6o57ag3mfxkqa7c327djhra'),
+        uint8ArrayFromString('foo')
       )
-      await repo.blocks.put(data)
     })
 
     it('get stats', async () => {

From eda6b69fe0a6cea189521bb9919b2b33b56ade7c Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Fri, 14 May 2021 16:28:57 +0100
Subject: [PATCH 02/25] chore: remove mc ref

---
 test/blockstore-test.js | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/test/blockstore-test.js b/test/blockstore-test.js
index 8a9e2b7d..bf02d7ef 100644
--- a/test/blockstore-test.js
+++ b/test/blockstore-test.js
@@ -20,7 +20,6 @@ const { identity } = require('multiformats/hashes/identity')
 const raw = require('multiformats/codecs/raw')
 const dagCbor = require('@ipld/dag-cbor')
 const dagPb = require('@ipld/dag-pb')
-const mc = require('multicodec')
 
 async function makePair () {
   const data = new TextEncoder().encode(`hello-${Math.random()}`)
@@ -54,7 +53,7 @@ module.exports = (repo) => {
       const digest1 = await sha256.digest(bData)
       pair = { key: CID.createV0(digest1), value: bData }
       const digest2 = await identity.digest(identityData)
-      identityCID = CID.createV1(mc.IDENTITY, digest2)
+      identityCID = CID.createV1(identity.code, digest2)
     })
 
     describe('.put', () => {

From 749376f305366a0bbe17712390c3dc99cae04533 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Fri, 14 May 2021 16:29:55 +0100
Subject: [PATCH 03/25] chore: update bundle size

---
 .aegir.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.aegir.js b/.aegir.js
index a04eff0f..8245a4b7 100644
--- a/.aegir.js
+++ b/.aegir.js
@@ -26,7 +26,7 @@ module.exports = {
     }
   },
   build: {
-    bundlesizeMax: '130kB',
+    bundlesizeMax: '111kB',
     config: esbuild
   }
 }

From e5da909a666144245c8367a035b5d851af82fea4 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Fri, 14 May 2021 16:54:14 +0100
Subject: [PATCH 04/25] chore: update node version

---
 .github/workflows/main.yml | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8c9c0ed8..8ced0bc3 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -13,8 +13,7 @@ jobs:
     steps:
       - uses: actions/checkout@v2
      - run: npm install
-      - run: npx aegir lint
-      - run: npx aegir ts -p check
+      - run: npm run lint
       # or
       # - uses: gozala/typescript-error-reporter-action@v1.0.8
       - run: npx aegir build
@@ -28,7 +27,7 @@ jobs:
     strategy:
       matrix:
         os: [windows-latest, ubuntu-latest, macos-latest]
-        node: [14, 15]
+        node: [14, 16]
       fail-fast: true
     steps:
       - uses: actions/checkout@v2
@@ -45,7 +44,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: microsoft/playwright-github-action@v1
       - run: npm install
-      - run: npx aegir test -t browser -t webworker --bail  # add --cov later when its fixed
+      - run: npx aegir test -t browser -t webworker --bail # add --cov later when its fixed
       - uses: codecov/codecov-action@v1
   test-firefox:
     needs: check

From b9fe4b124180f71b5f3b38725652011176a08555 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Sat, 15 May 2021 13:36:57 +0100
Subject: [PATCH 05/25] chore: update deps

---
 .aegir.js               | 26 +-------------------------
 package.json            |  6 ++----
 scripts/node-globals.js |  3 ---
 3 files changed, 3 insertions(+), 32 deletions(-)
 delete mode 100644 scripts/node-globals.js

diff --git a/.aegir.js b/.aegir.js
index 8245a4b7..638e5694 100644
--- a/.aegir.js
+++ b/.aegir.js
@@ -1,32 +1,8 @@
 'use strict'
-const path = require('path')
-
-/** @type {import('aegir').Options["build"]["config"]} */
-const esbuild = {
-  inject: [path.join(__dirname, 'scripts/node-globals.js')],
-  plugins: [
-    {
-      name: 'node built ins',
-      setup (build) {
-        build.onResolve({ filter: /^stream$/ }, () => {
-          return { path: require.resolve('readable-stream') }
-        })
-      }
-    }
-  ]
-}
 
 /** @type {import('aegir').PartialOptions} */
 module.exports = {
-  test: {
-    browser: {
-      config: {
-        buildConfig: esbuild
-      }
-    }
-  },
   build: {
-    bundlesizeMax: '111kB',
-    config: esbuild
+    bundlesizeMax: '93kB'
   }
 }
diff --git a/package.json b/package.json
index 9e561cc9..91994811 100644
--- a/package.json
+++ b/package.json
@@ -62,8 +62,6 @@
     "just-range": "^2.1.0",
     "memdown": "^6.0.0",
     "ncp": "^2.0.0",
-    "process": "^0.11.10",
-    "readable-stream": "^3.6.0",
     "rimraf": "^3.0.0",
     "sinon": "^10.0.0",
     "url": "^0.11.0",
@@ -76,9 +74,9 @@
     "datastore-level": "^5.0.0",
     "debug": "^4.1.0",
     "err-code": "^3.0.1",
-    "interface-blockstore": "^0.0.3",
+    "interface-blockstore": "^0.0.4",
     "interface-datastore": "ipfs/interface-datastore#feat/make-datastore-generic",
-    "ipfs-repo-migrations": "^8.0.0",
+    "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats",
     "ipfs-utils": "^7.0.0",
     "it-filter": "^1.0.2",
     "it-pushable": "^1.4.0",
diff --git a/scripts/node-globals.js b/scripts/node-globals.js
deleted file mode 100644
index d312b5a2..00000000
--- a/scripts/node-globals.js
+++ /dev/null
@@ -1,3 +0,0 @@
-// @ts-nocheck
-export const { Buffer } = require('buffer')
-export const process = require('process/browser')
\ No newline at end of file

From 3b3c44c9d93322b9b7286f2046668651120687a6 Mon Sep 17 00:00:00 2001
From: Alex Potsides
Date: Mon, 21 Jun 2021 15:28:55 +0100
Subject: [PATCH 06/25] chore: pr comments

Co-authored-by: Vasco Santos
Co-authored-by: Rod Vagg
---
 .github/workflows/main.yml    | 4 ++--
 src/blockstore-utils.js       | 3 ++-
 src/blockstore.js             | 6 +++---
 src/idstore.js                | 5 +++--
 test/blockstore-utils-test.js | 2 +-
 test/interop-test.js          | 2 +-
 test/stat-test.js             | 2 +-
 7 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8ced0bc3..398e9884 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -44,7 +44,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: microsoft/playwright-github-action@v1
       - run: npm install
-      - run: npx aegir test -t browser -t webworker --bail # add --cov later when its fixed
+      - run: npx aegir test -t browser -t webworker --bail
       - uses: codecov/codecov-action@v1
   test-firefox:
     needs: check
@@ -75,4 +75,4 @@ jobs:
 #    steps:
 #      - uses: actions/checkout@v2
 #      - run: npm install
-#      - run: npx xvfb-maybe aegir test -t electron-renderer --bail
\ No newline at end of file
+#      - run: npx xvfb-maybe aegir test -t electron-renderer --bail
diff --git a/src/blockstore-utils.js b/src/blockstore-utils.js
index 13611e22..32b605c0 100644
--- a/src/blockstore-utils.js
+++ b/src/blockstore-utils.js
@@ -14,7 +14,8 @@ const { base32 } = require('multiformats/bases/base32')
  * @returns {Key}
 */
 exports.cidToKey = cid => {
-  if (!(cid instanceof CID)) {
+  cid = CID.asCID(cid)
+  if (cid == null) {
     throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID')
   }
 
diff --git a/src/blockstore.js b/src/blockstore.js
index 7062943c..1f097757 100644
--- a/src/blockstore.js
+++ b/src/blockstore.js
@@ -6,7 +6,7 @@ const drain = require('it-drain')
 const pushable = require('it-pushable')
 
 /**
- * @typedef {import('multiformats').CID} CID
+ * @typedef {import('multiformats/cid').CID} CID
  * @typedef {import('interface-datastore').Datastore} Datastore
  * @typedef {import('interface-store').Options} DatastoreOptions
  * @typedef {import('interface-blockstore').Blockstore} Blockstore
@@ -49,7 +49,7 @@ function createBaseStore (store) {
     },
 
     async * query (query, options) {
-      /** @type {import('interface-datastore').Query} */
+      /** @type {import('interface-blockstore').Query} */
       const storeQuery = {
         prefix: query.prefix,
         limit: query.limit,
@@ -68,7 +68,7 @@ function createBaseStore (store) {
     },
 
     async * queryKeys (query, options) {
-      /** @type {import('interface-datastore').KeyQuery} */
+      /** @type {import('interface-blockstore').KeyQuery} */
       const storeQuery = {
         prefix: query.prefix,
         limit: query.limit,
diff --git a/src/idstore.js b/src/idstore.js
index 2272ce24..cb306cc2 100644
--- a/src/idstore.js
+++ b/src/idstore.js
@@ -3,7 +3,7 @@
 const filter = require('it-filter')
 const pushable = require('it-pushable')
 const drain = require('it-drain')
-const { CID } = require('multiformats')
+const { CID } = require('multiformats/cid')
 const errcode = require('err-code')
 const { identity } = require('multiformats/hashes/identity')
 
@@ -133,7 +133,8 @@ function createIdStore (store) {
  * @returns {{ isIdentity: false } | { isIdentity: true, digest: Uint8Array}}
 */
 function extractContents (k) {
-  if (!(k instanceof CID)) {
+  cid = CID.asCID(cid)
+  if (cid == null) {
     throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID')
   }
 
diff --git a/test/blockstore-utils-test.js b/test/blockstore-utils-test.js
index 7390ff4b..7ddad0fe 100644
--- a/test/blockstore-utils-test.js
+++ b/test/blockstore-utils-test.js
@@ -3,7 +3,7 @@
 
 const { expect } = require('aegir/utils/chai')
 const { Key } = require('interface-datastore')
-const { CID } = require('multiformats')
+const { CID } = require('multiformats/cid')
 const Repo = require('../src')
 const raw = require('multiformats/codecs/raw')
 
diff --git a/test/interop-test.js b/test/interop-test.js
index d74ca186..43e5dd62 100644
--- a/test/interop-test.js
+++ b/test/interop-test.js
@@ -2,7 +2,7 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
-const { CID } = require('multiformats')
+const { CID } = require('multiformats/cid')
 const Key = require('interface-datastore').Key
 const uint8ArrayToString = require('uint8arrays/to-string')
 const uint8ArrayFromString = require('uint8arrays/from-string')
diff --git a/test/stat-test.js b/test/stat-test.js
index 9fde8146..dac0e24a 100644
--- a/test/stat-test.js
+++ b/test/stat-test.js
@@ -2,7 +2,7 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
-const { CID } = require('multiformats')
+const { CID } = require('multiformats/cid')
 const uint8ArrayFromString = require('uint8arrays/from-string')
 
 /**
  * @param {import('../src/index')} repo

From 83ee0b05b89e23d5029e47856c26fd7a51bfd473 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Mon, 21 Jun 2021 16:32:28 +0100
Subject: [PATCH 07/25] chore: update pr

---
 package.json            | 2 +-
 src/blockstore-utils.js | 7 ++++---
 src/blockstore.js       | 4 ++--
 src/idstore.js          | 7 ++++---
 4 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/package.json b/package.json
index 91994811..a53d853e 100644
--- a/package.json
+++ b/package.json
@@ -75,7 +75,7 @@
     "debug": "^4.1.0",
     "err-code": "^3.0.1",
     "interface-blockstore": "^0.0.4",
-    "interface-datastore": "ipfs/interface-datastore#feat/make-datastore-generic",
+    "interface-datastore": "^4.0.2",
     "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats",
     "ipfs-utils": "^7.0.0",
     "it-filter": "^1.0.2",
diff --git a/src/blockstore-utils.js b/src/blockstore-utils.js
index 32b605c0..80ef8f46 100644
--- a/src/blockstore-utils.js
+++ b/src/blockstore-utils.js
@@ -10,11 +10,12 @@ const { base32 } = require('multiformats/bases/base32')
 
 /**
  * Transform a cid to the appropriate datastore key.
* - * @param {CID} cid + * @param {CID} c * @returns {Key} */ -exports.cidToKey = cid => { - cid = CID.asCID(cid) +exports.cidToKey = c => { + const cid = CID.asCID(c) + if (cid == null) { throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID') } diff --git a/src/blockstore.js b/src/blockstore.js index 1f097757..2a3aecda 100644 --- a/src/blockstore.js +++ b/src/blockstore.js @@ -49,7 +49,7 @@ function createBaseStore (store) { }, async * query (query, options) { - /** @type {import('interface-blockstore').Query} */ + /** @type {import('interface-datastore').Query} */ const storeQuery = { prefix: query.prefix, limit: query.limit, @@ -68,7 +68,7 @@ function createBaseStore (store) { }, async * queryKeys (query, options) { - /** @type {import('interface-blockstore').KeyQuery} */ + /** @type {import('interface-datastore').KeyQuery} */ const storeQuery = { prefix: query.prefix, limit: query.limit, diff --git a/src/idstore.js b/src/idstore.js index cb306cc2..6afe8cad 100644 --- a/src/idstore.js +++ b/src/idstore.js @@ -133,12 +133,13 @@ function createIdStore (store) { * @returns {{ isIdentity: false } | { isIdentity: true, digest: Uint8Array}} */ function extractContents (k) { - cid = CID.asCID(cid) + const cid = CID.asCID(k) + if (cid == null) { throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID') } - if (k.multihash.code !== identity.code) { + if (cid.multihash.code !== identity.code) { return { isIdentity: false } @@ -146,6 +147,6 @@ function extractContents (k) { return { isIdentity: true, - digest: k.multihash.digest + digest: cid.multihash.digest } } From 91bc2a47a27e76a162be01686552acdf87e94e24 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 22 Jun 2021 09:37:59 +0100 Subject: [PATCH 08/25] chore: update deps --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index a53d853e..724ea93e 100644 --- a/package.json +++ b/package.json @@ -74,7 +74,7 @@ "datastore-level": "^5.0.0", "debug": "^4.1.0", "err-code": "^3.0.1", - "interface-blockstore": "^0.0.4", + "interface-blockstore": "^0.0.5", "interface-datastore": "^4.0.2", "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats", "ipfs-utils": "^7.0.0", @@ -83,7 +83,7 @@ "just-safe-get": "^2.0.0", "just-safe-set": "^2.1.0", "merge-options": "^3.0.4", - "multiformats": "^8.0.5", + "multiformats": "^9.0.4", "p-queue": "^6.0.0", "proper-lockfile": "^4.0.0", "sort-keys": "^4.0.0", From fc7d4fadd6f4c2f5052f7c21dc94433d75ce2688 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 24 Jun 2021 16:21:32 +0100 Subject: [PATCH 09/25] chore: move pin manager to repo --- src/default-options.js | 7 +- src/index.js | 10 +- src/pins.js | 388 +++++++++++++++++++++++++++++++++++++++++ src/types.d.ts | 15 ++ test/pins-test.js | 18 +- 5 files changed, 424 insertions(+), 14 deletions(-) create mode 100644 src/pins.js diff --git a/src/default-options.js b/src/default-options.js index 19089ffc..694a68d9 100644 --- a/src/default-options.js +++ b/src/default-options.js @@ -5,7 +5,7 @@ /** * @type {Required} */ -module.exports = { +const defaultOptions = { autoMigrate: true, onMigrationProgress: () => {}, lock: 'fs', @@ -26,5 +26,10 @@ module.exports = { }, keys: { } + }, + codecLoader: { + getCodec: (codeOrName) => Promise.reject(new Error(`Could not load codec for "${codeOrName}"`)) } } + +module.exports = defaultOptions \ No newline at end of file diff --git a/src/index.js b/src/index.js index 00163a83..c2262909 100644 --- a/src/index.js +++ 
b/src/index.js @@ -18,6 +18,7 @@ const idstore = require('./idstore') const defaultOptions = require('./default-options') const defaultDatastore = require('./default-datastore') const ERRORS = require('./errors') +const Pins = require('./pins') const log = debug('ipfs:repo') @@ -48,7 +49,7 @@ class IpfsRepo { * @param {string} repoPath - path where the repo is stored * @param {Options} [options] - Configuration */ - constructor (repoPath, options = {}) { + constructor (repoPath, options) { if (typeof repoPath !== 'string') { throw new Error('missing repoPath') } @@ -64,10 +65,11 @@ class IpfsRepo { this.root = backends.create('root', this.path, this.options) this.datastore = backends.create('datastore', pathJoin(this.path, 'datastore'), this.options) this.keys = backends.create('keys', pathJoin(this.path, 'keys'), this.options) - this.pins = backends.create('pins', pathJoin(this.path, 'pins'), this.options) const blocksBaseStore = backends.create('blocks', pathJoin(this.path, 'blocks'), this.options) const blockStore = blockstore(blocksBaseStore, this.options.storageBackendOptions.blocks) this.blocks = idstore(blockStore) + const pinstore = backends.create('pins', pathJoin(this.path, 'pins'), this.options) + this.pins = new Pins({ pinstore, blockstore: this.blocks, codecs: this.options.codecLoader}) this.version = version(this.root) this.config = config(this.root) this.spec = spec(this.root) @@ -152,7 +154,7 @@ class IpfsRepo { await this.keys.open() log('creating pins') - await this.pins.open() + await this.pins.pinstore.open() this.closed = false log('all opened') @@ -287,7 +289,7 @@ class IpfsRepo { this.blocks, this.keys, this.datastore, - this.pins + this.pins.pinstore ].map((store) => store && store.close())) log('unlocking') diff --git a/src/pins.js b/src/pins.js new file mode 100644 index 00000000..72970b93 --- /dev/null +++ b/src/pins.js @@ -0,0 +1,388 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const { CID } = require('multiformats/cid') +const errCode = require('err-code') +const debug = require('debug') +const first = require('it-first') +const all = require('it-all') +const cborg = require('cborg') +const { Key } = require('interface-datastore') +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const { base32 } = require('multiformats/bases/base32') +const Digest = require('multiformats/hashes/digest') + +/** + * @typedef {object} Pin + * @property {number} depth + * @property {import('multiformats/cid').CIDVersion} [version] + * @property {number} [codec] + * @property {Record} [metadata] + */ + +/** + * @typedef {import('./types').PinType} PinType + * @typedef {import('./types').PinQueryType} PinQueryType + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + */ + +/** + * @typedef {Object} PinOptions + * @property {any} [metadata] + * + * @typedef {import('./types').AbortOptions} AbortOptions + */ + +/** + * @param {string} type + */ +function invalidPinTypeErr (type) { + const errMsg = `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` + return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') +} + +/** + * @param {CID} cid + */ +function cidToKey (cid) { + return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase()}`) +} + +/** + * @param {Key | string} key + */ +function keyToMultihash (key) { + return Digest.decode(base32.decode(key.toString().toLowerCase().slice(1))) +} + +/** + * @param {any} obj + * @param {string[]} path + * @param {boolean} 
parseBuffer + * @returns {Generator<[string, CID], void, undefined>} + */ +function * dagCborLinks (obj, path = [], parseBuffer = true) { + if (parseBuffer && Buffer.isBuffer(obj)) { + obj = cborg.decode(obj) + } + + for (let key of Object.keys(obj)) { + let _path = path.slice() + _path.push(key) + let val = obj[key] + + if (val && typeof val === 'object') { + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + let __path = _path.slice() + __path.push(i.toString()) + let o = val[i] + if (CID.isCID(o)) { + yield [__path.join('/'), o] + } else if (typeof o === 'object') { + yield * dagCborLinks(o, _path, false) + } + } + } else { + if (CID.isCID(val)) { + yield [_path.join('/'), val] + } else { + yield * dagCborLinks(val, _path, false) + } + } + } + } +} + +const PinTypes = { + /** @type {'direct'} */ + direct: ('direct'), + /** @type {'recursive'} */ + recursive: ('recursive'), + /** @type {'indirect'} */ + indirect: ('indirect'), + /** @type {'all'} */ + all: ('all') +} + +class Pins { + /** + * @param {Object} config + * @param {import('interface-datastore').Datastore} config.pinstore + * @param {import('interface-blockstore').Blockstore} config.blockstore + * @param {import('./types').CodecLoader} config.codecs + */ + constructor ({ pinstore, blockstore, codecs }) { + this.pinstore = pinstore + this.blockstore = blockstore + this.codecs = codecs + this.log = debug('ipfs:pin') + this.directPins = new Set() + this.recursivePins = new Set() + } + + /** + * @private + * @param {CID} cid + * @param {AbortOptions} [options] + * @returns {AsyncGenerator} + */ + async * _walkDag (cid, options) { + const block = await this.blockstore.get(cid, options) + const codec = await this.codecs.getCodec(cid.code) + const node = codec.decode(block) + + if (cid.code === dagPb.code) { + for (const link of node.Links) { + yield link.Hash + yield * this._walkDag(link.Hash, options) + } + } else if (cid.code === dagCbor.code) { + for (const [, childCid] of dagCborLinks(node)) { + yield childCid + yield * this._walkDag(childCid, options) + } + } + } + + /** + * @param {CID} cid + * @param {PinOptions & AbortOptions} [options] + * @returns {Promise} + */ + async pinDirectly (cid, options = {}) { + await this.blockstore.get(cid, options) + + /** @type {Pin} */ + const pin = { + depth: 0 + } + + if (cid.version !== 0) { + pin.version = cid.version + } + + if (cid.code !== dagPb.code) { + pin.codec = cid.code + } + + if (options.metadata) { + pin.metadata = options.metadata + } + + return this.pinstore.put(cidToKey(cid), cborg.encode(pin)) + } + + /** + * @param {CID} cid + * @param {AbortOptions} [options] + * @returns {Promise} + */ + // eslint-disable-next-line require-await + async unpin (cid, options) { + return this.pinstore.delete(cidToKey(cid)) + } + + /** + * @param {CID} cid + * @param {PinOptions & AbortOptions} [options] + * @returns {Promise} + */ + async pinRecursively (cid, options = {}) { + await this.fetchCompleteDag(cid, options) + + /** @type {Pin} */ + const pin = { + depth: Infinity + } + + if (cid.version !== 0) { + pin.version = cid.version + } + + if (cid.code !== dagPb.code) { + pin.codec = cid.code + } + + if (options.metadata) { + pin.metadata = options.metadata + } + + await this.pinstore.put(cidToKey(cid), cborg.encode(pin)) + } + + /** + * @param {AbortOptions} [options] + */ + async * directKeys (options) { + for await (const entry of this.pinstore.query({ + filters: [(entry) => { + const pin = cborg.decode(entry.value) + + return pin.depth === 0 + }] + })) { + const 
pin = cborg.decode(entry.value) + const version = pin.version || 0 + const codec = pin.codec != null ? pin.codec : dagPb.code + const multihash = keyToMultihash(entry.key) + + yield { + cid: CID.create(version, codec, multihash), + metadata: pin.metadata + } + } + } + + /** + * @param {AbortOptions} [options] + */ + async * recursiveKeys (options) { + for await (const entry of this.pinstore.query({ + filters: [(entry) => { + const pin = cborg.decode(entry.value) + + return pin.depth === Infinity + }] + })) { + const pin = cborg.decode(entry.value) + const version = pin.version || 0 + const codec = pin.codec != null ? pin.codec : dagPb.code + const multihash = keyToMultihash(entry.key) + + yield { + cid: CID.create(version, codec, multihash), + metadata: pin.metadata + } + } + } + + /** + * @param {AbortOptions} [options] + */ + async * indirectKeys (options) { + for await (const { cid } of this.recursiveKeys()) { + for await (const childCid of this._walkDag(cid, options)) { + // recursive pins override indirect pins + const types = [ + PinTypes.recursive + ] + + const result = await this.isPinnedWithType(childCid, types) + + if (result.pinned) { + continue + } + + yield childCid + } + } + } + + /** + * @param {CID} cid + * @param {PinQueryType|PinQueryType[]} types + * @param {AbortOptions} [options] + */ + async isPinnedWithType (cid, types, options) { + if (!Array.isArray(types)) { + types = [types] + } + + const all = types.includes(PinTypes.all) + const direct = types.includes(PinTypes.direct) + const recursive = types.includes(PinTypes.recursive) + const indirect = types.includes(PinTypes.indirect) + + if (recursive || direct || all) { + const result = await first(this.pinstore.query({ + prefix: cidToKey(cid).toString(), + filters: [entry => { + if (all) { + return true + } + + const pin = cborg.decode(entry.value) + + return types.includes(pin.depth === 0 ? PinTypes.direct : PinTypes.recursive) + }], + limit: 1 + })) + + if (result) { + const pin = cborg.decode(result.value) + + return { + cid, + pinned: true, + reason: pin.depth === 0 ? 
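To make the defaulting above concrete, here is a sketch of how a CID is rebuilt from a stored pin record plus the multihash recovered from its datastore key; `multihash` and `pin` are assumed inputs, and the helper name is illustrative.

    const { CID } = require('multiformats/cid')
    const dagPb = require('@ipld/dag-pb')

    function pinToCid (multihash, pin) {
      // absent fields mean "CIDv0, dag-pb", mirroring what pinDirectly omits
      const version = pin.version || 0
      const codec = pin.codec != null ? pin.codec : dagPb.code
      return CID.create(version, codec, multihash)
    }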
PinTypes.direct : PinTypes.recursive, + metadata: pin.metadata + } + } + } + + const self = this + + /** + * @param {CID} key + * @param {AsyncIterable<{ cid: CID, metadata: any }>} source + */ + async function * findChild (key, source) { + for await (const { cid: parentCid } of source) { + for await (const childCid of self._walkDag(parentCid)) { + if (childCid.equals(key)) { + yield parentCid + return + } + } + } + } + + if (all || indirect) { + // indirect (default) + // check each recursive key to see if multihash is under it + + const parentCid = await first(findChild(cid, this.recursiveKeys())) + + if (parentCid) { + return { + cid, + pinned: true, + reason: PinTypes.indirect, + parent: parentCid + } + } + } + + return { + cid, + pinned: false + } + } + + /** + * @param {CID} cid + * @param {AbortOptions} options + */ + async fetchCompleteDag (cid, options) { + await all(this._walkDag(cid, options)) + } + + /** + * Throws an error if the pin type is invalid + * + * @param {any} type + * @returns {type is PinType} + */ + static checkPinType (type) { + if (typeof type !== 'string' || !Object.keys(PinTypes).includes(type)) { + throw invalidPinTypeErr(type) + } + return true + } +} + +Pins.PinTypes = PinTypes + +module.exports = Pins diff --git a/src/types.d.ts b/src/types.d.ts index cc7a42d1..5f26a18d 100644 --- a/src/types.d.ts +++ b/src/types.d.ts @@ -1,6 +1,7 @@ import type { Datastore } from 'interface-datastore' import type { CID } from 'multiformats' +import type { BlockCodec } from 'multiformats/codecs/interface' export type AwaitIterable = Iterable | AsyncIterable export type Await = Promise | T @@ -30,6 +31,8 @@ export interface Options { storageBackends?: Partial> storageBackendOptions?: Partial> + + codecLoader: CodecLoader } export type Backends = 'root' | 'blocks' | 'keys' | 'datastore' | 'pins' @@ -170,3 +173,15 @@ export interface ConnMgrConfig { export interface RoutingConfig { Type?: string } + +export type PinType = 'recursive' | 'direct' | 'indirect' | 'all' + +export type PinQueryType = 'recursive' | 'direct' | 'indirect' | 'all' + +export interface AbortOptions { + signal?: AbortSignal +} + +export interface CodecLoader { + getCodec: (codeOrName: number | string) => Promise> +} diff --git a/test/pins-test.js b/test/pins-test.js index a2c78cc3..7320573a 100644 --- a/test/pins-test.js +++ b/test/pins-test.js @@ -26,24 +26,24 @@ module.exports = (repo) => { describe('.put', () => { it('simple', async () => { - await repo.pins.put(b, data) + await repo.pins.pinstore.put(b, data) }) it('multi write (locks)', async () => { - await Promise.all([repo.pins.put(b, data), repo.pins.put(b, data)]) + await Promise.all([repo.pins.pinstore.put(b, data), repo.pins.pinstore.put(b, data)]) }) it('massive multiwrite', async function () { this.timeout(15000) // add time for ci await Promise.all(range(100).map((i) => { - return repo.pins.put(new Key('hello' + i), dataList[i]) + return repo.pins.pinstore.put(new Key('hello' + i), dataList[i]) })) }) }) describe('.get', () => { it('simple', async () => { - const val = await repo.pins.get(b) + const val = await repo.pins.pinstore.get(b) expect(val).to.be.eql(data) }) @@ -51,7 +51,7 @@ module.exports = (repo) => { this.timeout(15000) // add time for ci await Promise.all(range(20 * 100).map(async (i) => { const j = i % dataList.length - const val = await repo.pins.get(new Key('hello' + j)) + const val = await repo.pins.pinstore.get(new Key('hello' + j)) expect(val).to.be.eql(dataList[j]) })) }).timeout(10 * 1000) @@ -59,20 +59,20 @@ 
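A short usage sketch for the query API above (hypothetical `repo` and `cid`; not part of the patch): callers get back a `pinned` flag plus the reason, which is how deletion paths can decide whether removing a block is safe.

    async function canDelete (repo, cid) {
      const { pinned, reason } = await repo.pins.isPinnedWithType(cid, 'all')

      if (pinned) {
        // reason is 'direct', 'recursive' or 'indirect'
        console.info(`refusing to delete ${cid}: pinned ${reason}`)
      }

      return !pinned
    }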
module.exports = (repo) => { describe('.has', () => { it('existing pin', async () => { - const exists = await repo.pins.has(b) + const exists = await repo.pins.pinstore.has(b) expect(exists).to.eql(true) }) it('non existent pin', async () => { - const exists = await repo.pins.has(new Key('world')) + const exists = await repo.pins.pinstore.has(new Key('world')) expect(exists).to.eql(false) }) }) describe('.delete', () => { it('simple', async () => { - await repo.pins.delete(b) - const exists = await repo.pins.has(b) + await repo.pins.pinstore.delete(b) + const exists = await repo.pins.pinstore.has(b) expect(exists).to.equal(false) }) }) From b72930a8bf5c71793867f7072fe9e35b25af8c3e Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 24 Jun 2021 18:39:29 +0100 Subject: [PATCH 10/25] feat: lift pinning into repo --- src/blockstore-utils.js | 4 +- src/config.js | 8 ++-- src/default-options.js | 5 +- src/idstore.js | 4 +- src/index.js | 42 +++++++++++------ src/pinned-blockstore.js | 94 +++++++++++++++++++++++++++++++++++++ src/pins.js | 60 ++++++++++++----------- src/types.d.ts | 6 +-- test/blockstore-test.js | 5 +- test/browser.js | 5 +- test/fixtures/load-codec.js | 26 ++++++++++ test/is-initialized.js | 3 +- test/lock-test.js | 3 +- test/migrations-test.js | 3 +- test/node.js | 18 +++---- test/options-test.js | 9 ++-- test/repo-test.js | 23 ++++----- 17 files changed, 229 insertions(+), 89 deletions(-) create mode 100644 src/pinned-blockstore.js create mode 100644 test/fixtures/load-codec.js diff --git a/src/blockstore-utils.js b/src/blockstore-utils.js index 80ef8f46..4c6c29d0 100644 --- a/src/blockstore-utils.js +++ b/src/blockstore-utils.js @@ -4,7 +4,7 @@ const { Key } = require('interface-datastore') const { CID } = require('multiformats') const mhd = require('multiformats/hashes/digest') const raw = require('multiformats/codecs/raw') -const errcode = require('err-code') +const errCode = require('err-code') const { base32 } = require('multiformats/bases/base32') /** @@ -17,7 +17,7 @@ exports.cidToKey = c => { const cid = CID.asCID(c) if (cid == null) { - throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID') + throw errCode(new Error('Not a valid cid'), 'ERR_INVALID_CID') } const encoded = base32.encode(cid.multihash.bytes) diff --git a/src/config.js b/src/config.js index cbd7606e..c5425e30 100644 --- a/src/config.js +++ b/src/config.js @@ -4,7 +4,7 @@ const { Key } = require('interface-datastore') const { default: Queue } = require('p-queue') const _get = require('just-safe-get') const _set = require('just-safe-set') -const errcode = require('err-code') +const errCode = require('err-code') const errors = require('./errors') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') @@ -78,11 +78,11 @@ module.exports = (store) => { set (key, value, options = {}) { // @ts-ignore ts thinks key will only be a string, but it may not be if (typeof key !== 'string' && !(key instanceof String)) { - throw errcode(new Error('Invalid key type: ' + typeof key), 'ERR_INVALID_KEY') + throw errCode(new Error('Invalid key type: ' + typeof key), 'ERR_INVALID_KEY') } if (value === undefined || (value instanceof Uint8Array)) { - throw errcode(new Error('Invalid value type: ' + typeof value), 'ERR_INVALID_VALUE') + throw errCode(new Error('Invalid value type: ' + typeof value), 'ERR_INVALID_VALUE') } return setQueue.add(() => _maybeDoSet({ @@ -100,7 +100,7 @@ module.exports = (store) => { */ replace (value, options = {}) { if 
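For reference, a self-contained sketch of the key derivation this file performs, assuming multiformats ^9: `base32.encode` returns a multibase string with a leading `b`, which `cidToKey` strips before upper-casing.

    const { CID } = require('multiformats/cid')
    const raw = require('multiformats/codecs/raw')
    const { sha256 } = require('multiformats/hashes/sha2')
    const { base32 } = require('multiformats/bases/base32')

    async function exampleKey () {
      const digest = await sha256.digest(new TextEncoder().encode('hello'))
      const cid = CID.createV1(raw.code, digest)

      // drop the 'b' multibase prefix and upper-case, as cidToKey does
      return '/' + base32.encode(cid.multihash.bytes).slice(1).toUpperCase()
    }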
(!value || (value instanceof Uint8Array)) { - throw errcode(new Error('Invalid value type: ' + typeof value), 'ERR_INVALID_VALUE') + throw errCode(new Error('Invalid value type: ' + typeof value), 'ERR_INVALID_VALUE') } return setQueue.add(() => _maybeDoSet({ diff --git a/src/default-options.js b/src/default-options.js index 694a68d9..cce52de5 100644 --- a/src/default-options.js +++ b/src/default-options.js @@ -26,10 +26,7 @@ const defaultOptions = { }, keys: { } - }, - codecLoader: { - getCodec: (codeOrName) => Promise.reject(new Error(`Could not load codec for "${codeOrName}"`)) } } -module.exports = defaultOptions \ No newline at end of file +module.exports = defaultOptions diff --git a/src/idstore.js b/src/idstore.js index 6afe8cad..974dea4d 100644 --- a/src/idstore.js +++ b/src/idstore.js @@ -4,7 +4,7 @@ const filter = require('it-filter') const pushable = require('it-pushable') const drain = require('it-drain') const { CID } = require('multiformats/cid') -const errcode = require('err-code') +const errCode = require('err-code') const { identity } = require('multiformats/hashes/identity') /** @@ -136,7 +136,7 @@ function extractContents (k) { const cid = CID.asCID(k) if (cid == null) { - throw errcode(new Error('Not a valid cid'), 'ERR_INVALID_CID') + throw errCode(new Error('Not a valid cid'), 'ERR_INVALID_CID') } if (cid.multihash.code !== identity.code) { diff --git a/src/index.js b/src/index.js index c2262909..ba89f011 100644 --- a/src/index.js +++ b/src/index.js @@ -2,7 +2,7 @@ const _get = require('just-safe-get') const debug = require('debug') -const errcode = require('err-code') +const errCode = require('err-code') const migrator = require('ipfs-repo-migrations') const bytes = require('bytes') const pathJoin = require('ipfs-utils/src/path-join') @@ -13,12 +13,13 @@ const version = require('./version') const config = require('./config') const spec = require('./spec') const apiAddr = require('./api-addr') -const blockstore = require('./blockstore') -const idstore = require('./idstore') +const createBlockstore = require('./blockstore') +const createIdstore = require('./idstore') const defaultOptions = require('./default-options') const defaultDatastore = require('./default-datastore') const ERRORS = require('./errors') const Pins = require('./pins') +const createPinnedBlockstore = require('./pinned-blockstore') const log = debug('ipfs:repo') @@ -47,11 +48,16 @@ const lockers = { class IpfsRepo { /** * @param {string} repoPath - path where the repo is stored + * @param {import('./types').loadCodec} loadCodec - a function that will load multiformat block codecs * @param {Options} [options] - Configuration */ - constructor (repoPath, options) { + constructor (repoPath, loadCodec, options) { if (typeof repoPath !== 'string') { - throw new Error('missing repoPath') + throw new Error('missing repo path') + } + + if (typeof loadCodec !== 'function') { + throw new Error('missing codec loader') } this.options = merge(defaultOptions, options) @@ -65,11 +71,19 @@ class IpfsRepo { this.root = backends.create('root', this.path, this.options) this.datastore = backends.create('datastore', pathJoin(this.path, 'datastore'), this.options) this.keys = backends.create('keys', pathJoin(this.path, 'keys'), this.options) + const blocksBaseStore = backends.create('blocks', pathJoin(this.path, 'blocks'), this.options) - const blockStore = blockstore(blocksBaseStore, this.options.storageBackendOptions.blocks) - this.blocks = idstore(blockStore) + const blockstore = createBlockstore(blocksBaseStore, 
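A sketch of the new constructor call after this change (the `ipfs-repo` package name and repo path are illustrative): callers must now pass a codec loader before the options bag.

    const IPFSRepo = require('ipfs-repo')
    const dagPb = require('@ipld/dag-pb')
    const dagCbor = require('@ipld/dag-cbor')

    // resolve codecs by numeric code or string name
    const loadCodec = (codeOrName) => {
      const table = {
        [dagPb.code]: dagPb,
        [dagPb.name]: dagPb,
        [dagCbor.code]: dagCbor,
        [dagCbor.name]: dagCbor
      }
      return Promise.resolve(table[codeOrName])
    }

    const repo = new IPFSRepo('/tmp/example-repo', loadCodec)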
this.options.storageBackendOptions.blocks) const pinstore = backends.create('pins', pathJoin(this.path, 'pins'), this.options) - this.pins = new Pins({ pinstore, blockstore: this.blocks, codecs: this.options.codecLoader}) + + this.pins = new Pins({ pinstore, blockstore, loadCodec }) + + // this blockstore will not delete blocks that have been pinned + const pinnedBlockstore = createPinnedBlockstore(this.pins, blockstore) + + // this blockstore will extract blocks from multihashes with the identity codec + this.blocks = createIdstore(pinnedBlockstore) + this.version = version(this.root) this.config = config(this.root) this.spec = spec(this.root) @@ -123,7 +137,7 @@ class IpfsRepo { */ async open () { if (!this.closed) { - throw errcode(new Error('repo is already open'), ERRORS.ERR_REPO_ALREADY_OPEN) + throw errCode(new Error('repo is already open'), ERRORS.ERR_REPO_ALREADY_OPEN) } log('opening at: %s', this.path) @@ -218,7 +232,7 @@ class IpfsRepo { const lockfile = await this._locker.lock(path) if (typeof lockfile.close !== 'function') { - throw errcode(new Error('Locks must have a close method'), 'ERR_NO_CLOSE_FUNCTION') + throw errCode(new Error('Locks must have a close method'), 'ERR_NO_CLOSE_FUNCTION') } return lockfile @@ -249,7 +263,7 @@ class IpfsRepo { ]) } catch (err) { if (err.code === 'ERR_NOT_FOUND') { - throw errcode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { + throw errCode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { path: this.path }) } @@ -258,7 +272,7 @@ class IpfsRepo { } if (!config) { - throw errcode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { + throw errCode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { path: this.path }) } @@ -271,7 +285,7 @@ class IpfsRepo { */ async close () { if (this.closed) { - throw errcode(new Error('repo is already closed'), ERRORS.ERR_REPO_ALREADY_CLOSED) + throw errCode(new Error('repo is already closed'), ERRORS.ERR_REPO_ALREADY_CLOSED) } log('closing at: %s', this.path) @@ -330,7 +344,7 @@ class IpfsRepo { repoSize: size } } - throw errcode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { + throw errCode(new Error('repo is not initialized yet'), ERRORS.ERR_REPO_NOT_INITIALIZED, { path: this.path }) } diff --git a/src/pinned-blockstore.js b/src/pinned-blockstore.js new file mode 100644 index 00000000..e323e360 --- /dev/null +++ b/src/pinned-blockstore.js @@ -0,0 +1,94 @@ +'use strict' + +const map = require('it-map') +const errCode = require('err-code') +const Pins = require('./pins') + +/** + * @typedef {import('interface-datastore').Query} Query + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-datastore').Options} DatastoreOptions + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('multiformats/cid').CID} CID + */ + +/** + * + * @param {Blockstore} blockstore + */ +module.exports = createPinnedBlockstore + +/** + * @param {Pins} pins + * @param {Blockstore} store + * @returns {Blockstore} + */ +function createPinnedBlockstore (pins, store) { + return { + open () { + return store.open() + }, + + close () { + return store.close() + }, + + query (query, options) { + return store.query(query, options) + }, + + queryKeys (query, options) { + return store.queryKeys(query, options) + }, + + async get (cid, options) { + return store.get(cid, options) + }, + + async * getMany (cids, options) { + yield * 
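The observable behaviour of this wrapper, as a hedged sketch (hypothetical `repo` and `pinnedCid`): reads and writes pass straight through to the inner store, but deleting a pinned block rejects with the `ERR_BLOCK_PINNED` code defined below.

    async function tryDelete (repo, pinnedCid) {
      try {
        await repo.blocks.delete(pinnedCid)
      } catch (err) {
        console.error(err.code) // 'ERR_BLOCK_PINNED'
      }
    }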
store.getMany(cids, options) + }, + + async put (cid, buf, options) { + await store.put(cid, buf, options) + }, + + async * putMany (pairs, options) { + yield * store.putMany(pairs, options) + }, + + has (cid, options) { + return store.has(cid, options) + }, + + async delete (cid, options) { + await ensureNotPinned(cid, pins) + + return store.delete(cid, options) + }, + + deleteMany (cids, options) { + return store.deleteMany(map(cids, async cid => { + await ensureNotPinned(cid, pins) + + return cid + }), options) + }, + + batch () { + return store.batch() + } + } +} + +/** + * @param {CID} cid + * @param {Pins} pins + */ +async function ensureNotPinned (cid, pins) { + const { pinned, reason } = await pins.isPinnedWithType(cid, Pins.PinTypes.all) + + if (pinned) { + throw errCode(new Error(`pinned: ${reason}`), 'ERR_BLOCK_PINNED') + } +} diff --git a/src/pins.js b/src/pins.js index 72970b93..2471063a 100644 --- a/src/pins.js +++ b/src/pins.js @@ -46,16 +46,17 @@ function invalidPinTypeErr (type) { * @param {CID} cid */ function cidToKey (cid) { - return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase()}`) + return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase().substring(1)}`) } /** * @param {Key | string} key */ function keyToMultihash (key) { - return Digest.decode(base32.decode(key.toString().toLowerCase().slice(1))) + return Digest.decode(base32.decode(`b${key.toString().toLowerCase().substring(1)}`)) } +// eslint-disable-next-line jsdoc/require-returns-check /** * @param {any} obj * @param {string[]} path @@ -67,18 +68,18 @@ function * dagCborLinks (obj, path = [], parseBuffer = true) { obj = cborg.decode(obj) } - for (let key of Object.keys(obj)) { - let _path = path.slice() + for (const key of Object.keys(obj)) { + const _path = path.slice() _path.push(key) - let val = obj[key] + const val = obj[key] if (val && typeof val === 'object') { if (Array.isArray(val)) { for (let i = 0; i < val.length; i++) { - let __path = _path.slice() + const __path = _path.slice() __path.push(i.toString()) - let o = val[i] - if (CID.isCID(o)) { + const o = val[i] + if (CID.isCID(o)) { // eslint-disable-line max-depth yield [__path.join('/'), o] } else if (typeof o === 'object') { yield * dagCborLinks(o, _path, false) @@ -111,13 +112,13 @@ class Pins { * @param {Object} config * @param {import('interface-datastore').Datastore} config.pinstore * @param {import('interface-blockstore').Blockstore} config.blockstore - * @param {import('./types').CodecLoader} config.codecs + * @param {import('./types').loadCodec} config.loadCodec */ - constructor ({ pinstore, blockstore, codecs }) { + constructor ({ pinstore, blockstore, loadCodec }) { this.pinstore = pinstore this.blockstore = blockstore - this.codecs = codecs - this.log = debug('ipfs:pin') + this.loadCodec = loadCodec + this.log = debug('ipfs:repo:pin') this.directPins = new Set() this.recursivePins = new Set() } @@ -129,20 +130,24 @@ class Pins { * @returns {AsyncGenerator} */ async * _walkDag (cid, options) { - const block = await this.blockstore.get(cid, options) - const codec = await this.codecs.getCodec(cid.code) - const node = codec.decode(block) - - if (cid.code === dagPb.code) { - for (const link of node.Links) { - yield link.Hash - yield * this._walkDag(link.Hash, options) - } - } else if (cid.code === dagCbor.code) { - for (const [, childCid] of dagCborLinks(node)) { - yield childCid - yield * this._walkDag(childCid, options) + try { + const block = await this.blockstore.get(cid, options) + const codec = await 
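Why the `b` is prepended here, sketched under multiformats ^9: `base32.decode` only accepts multibase-prefixed strings, while the stored key keeps just the upper-cased payload, so the prefix and lower case must be restored before decoding. The round trip below takes an assumed `cid` input.

    const { base32 } = require('multiformats/bases/base32')
    const Digest = require('multiformats/hashes/digest')

    function roundTrip (cid) {
      // key payload as stored: upper-case base32, multibase prefix dropped
      const stored = base32.encode(cid.multihash.bytes).toUpperCase().substring(1)

      // restore the prefix and case before decoding, as keyToMultihash does
      return Digest.decode(base32.decode(`b${stored.toLowerCase()}`))
    }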
this.loadCodec(cid.code) + const node = codec.decode(block) + + if (cid.code === dagPb.code) { + for (const link of node.Links) { + yield link.Hash + yield * this._walkDag(link.Hash, options) + } + } else if (cid.code === dagCbor.code) { + for (const [, childCid] of dagCborLinks(node)) { + yield childCid + yield * this._walkDag(childCid, options) + } } + } catch (err) { + this.log('Could not walk DAG for CID', cid.toString(), err) } } @@ -179,9 +184,8 @@ class Pins { * @param {AbortOptions} [options] * @returns {Promise} */ - // eslint-disable-next-line require-await - async unpin (cid, options) { - return this.pinstore.delete(cidToKey(cid)) + unpin (cid, options) { + return this.pinstore.delete(cidToKey(cid), options) } /** diff --git a/src/types.d.ts b/src/types.d.ts index 5f26a18d..4eaf91ec 100644 --- a/src/types.d.ts +++ b/src/types.d.ts @@ -31,8 +31,6 @@ export interface Options { storageBackends?: Partial> storageBackendOptions?: Partial> - - codecLoader: CodecLoader } export type Backends = 'root' | 'blocks' | 'keys' | 'datastore' | 'pins' @@ -182,6 +180,4 @@ export interface AbortOptions { signal?: AbortSignal } -export interface CodecLoader { - getCodec: (codeOrName: number | string) => Promise> -} +export type loadCodec = (codeOrName: number | string) => Promise> diff --git a/test/blockstore-test.js b/test/blockstore-test.js index bf02d7ef..2f895bbf 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -20,6 +20,7 @@ const { identity } = require('multiformats/hashes/identity') const raw = require('multiformats/codecs/raw') const dagCbor = require('@ipld/dag-cbor') const dagPb = require('@ipld/dag-pb') +const loadCodec = require('./fixtures/load-codec') async function makePair () { const data = new TextEncoder().encode(`hello-${Math.random()}`) @@ -205,7 +206,7 @@ module.exports = (repo) => { const cid = CID.createV0(digest) const key = cidToKey(cid) - otherRepo = new IPFSRepo(tempDir(), { + otherRepo = new IPFSRepo(tempDir(), loadCodec, { storageBackends: { blocks: class ExplodingBlockStore extends Adapter { /** @@ -334,7 +335,7 @@ module.exports = (repo) => { const cid = CID.createV0(digest) const key = cidToKey(cid) - otherRepo = new IPFSRepo(tempDir(), { + otherRepo = new IPFSRepo(tempDir(), loadCodec, { storageBackends: { blocks: class ExplodingBlockStore extends Adapter { /** diff --git a/test/browser.js b/test/browser.js index 914de8c9..7660c76a 100644 --- a/test/browser.js +++ b/test/browser.js @@ -3,12 +3,13 @@ 'use strict' const IPFSRepo = require('../src') +const loadCodec = require('./fixtures/load-codec') async function createTempRepo (options = {}) { const date = Date.now().toString() const repoPath = 'test-repo-for-' + date - const repo = new IPFSRepo(repoPath, options) + const repo = new IPFSRepo(repoPath, loadCodec, options) await repo.init({}) await repo.open() @@ -19,7 +20,7 @@ describe('IPFS Repo Tests on the Browser', () => { require('./options-test') require('./migrations-test')(createTempRepo) - const repo = new IPFSRepo('myrepo') + const repo = new IPFSRepo('myrepo', loadCodec) before(async () => { await repo.init({}) diff --git a/test/fixtures/load-codec.js b/test/fixtures/load-codec.js new file mode 100644 index 00000000..4ed5c587 --- /dev/null +++ b/test/fixtures/load-codec.js @@ -0,0 +1,26 @@ +/* eslint-env mocha */ +'use strict' + +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') + +/** + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec + */ + +/** + * @type 
{import('../../src/types').loadCodec} + */ +const loadCodec = (codeOrName) => { + /** @type {Record} */ + const lookup = { + [dagPb.code]: dagPb, + [dagPb.name]: dagPb, + [dagCbor.code]: dagCbor, + [dagCbor.name]: dagCbor + } + + return Promise.resolve(lookup[codeOrName]) +} + +module.exports = loadCodec diff --git a/test/is-initialized.js b/test/is-initialized.js index 2a08b86a..d20db9f8 100644 --- a/test/is-initialized.js +++ b/test/is-initialized.js @@ -5,6 +5,7 @@ const { expect } = require('aegir/utils/chai') const tempDir = require('ipfs-utils/src/temp-dir') const IPFSRepo = require('../src') +const loadCodec = require('./fixtures/load-codec') /** * @typedef {import('../src/index')} Repo @@ -15,7 +16,7 @@ describe('isInitialized', () => { let repo beforeEach(() => { - repo = new IPFSRepo(tempDir(b => 'test-repo-for-' + b)) + repo = new IPFSRepo(tempDir(b => 'test-repo-for-' + b), loadCodec) }) it('should be false before initialization', async () => { diff --git a/test/lock-test.js b/test/lock-test.js index 2bb99cea..be9a1193 100644 --- a/test/lock-test.js +++ b/test/lock-test.js @@ -5,6 +5,7 @@ const { expect } = require('aegir/utils/chai') const IPFSRepo = require('../') const lockMemory = require('../src/lock-memory') const { LockExistsError } = require('./../src/errors') +const loadCodec = require('./fixtures/load-codec') /** * @param {import('../src/index')} repo @@ -18,7 +19,7 @@ module.exports = (repo) => { }) it('should prevent multiple repos from using the same path', async () => { - const repoClone = new IPFSRepo(repo.path, repo.options) + const repoClone = new IPFSRepo(repo.path, loadCodec, repo.options) try { await repoClone.init({}) await repoClone.open() diff --git a/test/migrations-test.js b/test/migrations-test.js index e6d979fc..9f000314 100644 --- a/test/migrations-test.js +++ b/test/migrations-test.js @@ -9,6 +9,7 @@ const migrator = require('ipfs-repo-migrations') const constants = require('../src/constants') const errors = require('../src/errors') const IPFSRepo = require('../src') +const loadCodec = require('./fixtures/load-codec') /** * @typedef {import('../src/index')} Repo */ @@ -75,7 +76,7 @@ module.exports = (createTempRepo) => { const newOpts = Object.assign({}, repo.options) newOpts.autoMigrate = option - const newRepo = new IPFSRepo(repo.path, newOpts) + const newRepo = new IPFSRepo(repo.path, loadCodec, newOpts) expect(migrateStub.called).to.be.false() diff --git a/test/node.js b/test/node.js index 5514ef43..6d4d4ec7 100644 --- a/test/node.js +++ b/test/node.js @@ -8,22 +8,27 @@ const path = require('path') const promisify = require('util').promisify const os = require('os') const { LockExistsError } = require('../src/errors') +const loadCodec = require('./fixtures/load-codec') const asyncRimraf = promisify(rimraf) const asyncNcp = promisify(ncp) const fsstat = promisify(fs.stat) -const IPFSRepo = require('../src') - /** + * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec * @typedef {import('../src/types').Options} Options */ -async function createTempRepo (options = {}) { +const IPFSRepo = require('../src') + +/** + * @param {Options} options + */ +async function createTempRepo (options) { const date = Date.now().toString() const repoPath = path.join(os.tmpdir(), 'test-repo-for-' + date) await asyncNcp(path.join(__dirname, 'test-repo'), repoPath) - const repo = new IPFSRepo(repoPath, options) + const repo = new IPFSRepo(repoPath, loadCodec, options) await repo.open() return repo } @@ -73,9 +78,6 @@ describe('IPFS Repo Tests 
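A sketch of how the repo consumes this fixture (hypothetical block bytes): the CID's codec code selects the decoder, which is what `_walkDag` relies on when traversing pinned DAGs.

    const loadCodec = require('./fixtures/load-codec')

    async function decodeBlock (cid, bytes) {
      const codec = await loadCodec(cid.code)
      return codec.decode(bytes) // e.g. { Data, Links } for a dag-pb block
    }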
onNode.js', () => { { name: 'memory', opts: { - // i dont think we need this - // fs: require('interface-datastore').MemoryDatastore, - // level: require('memdown'), lock: 'memory' }, init: true @@ -98,7 +100,7 @@ describe('IPFS Repo Tests onNode.js', () => { const date = Date.now().toString() const repoPath = path.join(os.tmpdir(), 'test-repo-for-' + date) - const repo = new IPFSRepo(repoPath, r.opts) + const repo = new IPFSRepo(repoPath, loadCodec, r.opts) before(async () => { if (r.init) { diff --git a/test/options-test.js b/test/options-test.js index dc724e6c..84c00066 100644 --- a/test/options-test.js +++ b/test/options-test.js @@ -10,6 +10,7 @@ if (!rimraf.sync) { rimraf.sync = noop } const Repo = require('../') +const loadCodec = require('./fixtures/load-codec') describe('custom options tests', () => { const repoPath = tempDir() @@ -21,11 +22,11 @@ describe('custom options tests', () => { expect( // @ts-expect-error () => new Repo() - ).to.throw('missing repoPath') + ).to.throw('missing repo path') }) it('default options', () => { - const repo = new Repo(repoPath) + const repo = new Repo(repoPath, loadCodec) expect(repo.options).to.deep.equal(expectedRepoOptions()) }) @@ -47,7 +48,7 @@ describe('custom options tests', () => { } } - const repo = new Repo(repoPath, { + const repo = new Repo(repoPath, loadCodec, { lock }) @@ -73,7 +74,7 @@ describe('custom options tests', () => { } } - const repo = new Repo(repoPath, { + const repo = new Repo(repoPath, loadCodec, { // @ts-expect-error lock }) diff --git a/test/repo-test.js b/test/repo-test.js index 37922d69..f50eda43 100644 --- a/test/repo-test.js +++ b/test/repo-test.js @@ -7,6 +7,7 @@ const IPFSRepo = require('../') const Errors = require('../src/errors') const bytes = require('bytes') const { Adapter } = require('interface-datastore') +const loadCodec = require('./fixtures/load-codec') /** * @typedef {import('interface-datastore').Key} Key @@ -172,7 +173,7 @@ module.exports = (repo) => { count++ } } - const repo = new IPFSRepo(tempDir(), { + const repo = new IPFSRepo(tempDir(), loadCodec, { lock: 'memory', storageBackends: { root: FakeDatastore, @@ -200,7 +201,7 @@ module.exports = (repo) => { }) it('should throw non-already-open errors when opening the root', async () => { - const otherRepo = new IPFSRepo(tempDir()) + const otherRepo = new IPFSRepo(tempDir(), loadCodec) const err = new Error('wat') otherRepo.root.open = () => { @@ -215,7 +216,7 @@ module.exports = (repo) => { }) it('should ignore non-already-open errors when opening the root', async () => { - const otherRepo = new IPFSRepo(tempDir()) + const otherRepo = new IPFSRepo(tempDir(), loadCodec) const err = new Error('Already open') let threwError = false @@ -251,7 +252,7 @@ module.exports = (repo) => { }) it('should remove the lockfile when opening the repo fails', async () => { - otherRepo = new IPFSRepo(tempDir(), { + otherRepo = new IPFSRepo(tempDir(), loadCodec, { storageBackends: { datastore: ExplodingDatastore, blocks: ExplodingDatastore, @@ -270,7 +271,7 @@ module.exports = (repo) => { }) it('should re-throw the original error even when removing the lockfile fails', async () => { - otherRepo = new IPFSRepo(tempDir(), { + otherRepo = new IPFSRepo(tempDir(), loadCodec, { storageBackends: { datastore: ExplodingDatastore, blocks: ExplodingDatastore, @@ -294,7 +295,7 @@ module.exports = (repo) => { }) it('should throw when repos are not initialised', async () => { - otherRepo = new IPFSRepo(tempDir(), { + otherRepo = new IPFSRepo(tempDir(), loadCodec, { storageBackends: 
{ datastore: ExplodingDatastore, blocks: ExplodingDatastore, @@ -312,7 +313,7 @@ module.exports = (repo) => { }) it('should throw when config is not set', async () => { - otherRepo = new IPFSRepo(tempDir()) + otherRepo = new IPFSRepo(tempDir(), loadCodec) otherRepo.config.exists = async () => false otherRepo.spec.exists = async () => true otherRepo.version.check = async () => false @@ -327,7 +328,7 @@ module.exports = (repo) => { it('should return the max storage stat when set', async () => { const maxStorage = '1GB' - otherRepo = new IPFSRepo(tempDir()) + otherRepo = new IPFSRepo(tempDir(), loadCodec) await otherRepo.init({}) await otherRepo.open() await otherRepo.config.set('Datastore.StorageMax', maxStorage) @@ -339,7 +340,7 @@ module.exports = (repo) => { }) it('should throw unexpected errors when closing', async () => { - otherRepo = new IPFSRepo(tempDir()) + otherRepo = new IPFSRepo(tempDir(), loadCodec) await otherRepo.init({}) await otherRepo.open() @@ -358,7 +359,7 @@ module.exports = (repo) => { }) it('should swallow expected errors when closing', async () => { - otherRepo = new IPFSRepo(tempDir()) + otherRepo = new IPFSRepo(tempDir(), loadCodec) await otherRepo.init({}) await otherRepo.open() @@ -372,7 +373,7 @@ module.exports = (repo) => { }) it('should throw unexpected errors when checking if the repo has been initialised', async () => { - otherRepo = new IPFSRepo(tempDir()) + otherRepo = new IPFSRepo(tempDir(), loadCodec) otherRepo.config.exists = async () => { return true From 22d006f38257217ac110c260012e7997efba4655 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 24 Jun 2021 18:45:44 +0100 Subject: [PATCH 11/25] chore: update deps --- package.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/package.json b/package.json index 724ea93e..da0f0537 100644 --- a/package.json +++ b/package.json @@ -69,6 +69,7 @@ }, "dependencies": { "bytes": "^3.1.0", + "cborg": "^1.3.4", "datastore-core": "^4.0.0", "datastore-fs": "^4.0.0", "datastore-level": "^5.0.0", @@ -79,6 +80,7 @@ "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats", "ipfs-utils": "^7.0.0", "it-filter": "^1.0.2", + "it-map": "^1.0.5", "it-pushable": "^1.4.0", "just-safe-get": "^2.0.0", "just-safe-set": "^2.1.0", From 509842468206370511c2ca3661f97536dfbc89d9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 29 Jun 2021 12:08:00 +0100 Subject: [PATCH 12/25] chore: update deps --- package.json | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/package.json b/package.json index da0f0537..e101e651 100644 --- a/package.json +++ b/package.json @@ -12,8 +12,8 @@ "browser": { "rimraf": false, "datastore-fs": "datastore-level", - "./src/lock.js": "./src/lock-memory.js", - "./src/default-options.js": "./src/default-options-browser.js" + "./src/locks/fs.js": "./src/locks/memory.js", + "./src/default-options.js": "./src/default-options.browser.js" }, "scripts": { "prepare": "aegir build --no-bundle", @@ -45,46 +45,44 @@ "npm": ">=6.0.0" }, "devDependencies": { - "@ipld/dag-cbor": "^5.0.5", - "@ipld/dag-pb": "^1.1.4", + "@ipld/dag-cbor": "^6.0.4", + "@ipld/dag-pb": "^2.1.0", "@types/bytes": "^3.1.0", "@types/debug": "^4.1.5", - "@types/memdown": "^3.0.0", - "@types/ncp": "^2.0.4", "@types/proper-lockfile": "^4.1.1", "@types/rimraf": "^3.0.0", "aegir": "^33.1.0", "assert": "^2.0.0", + "blockstore-datastore-adapter": "0.0.3", "events": "^3.3.0", + "ipfs-utils": "^8.1.3", "it-all": "^1.0.2", "it-drain": "^1.0.1", "it-first": "^1.0.2", "just-range": 
"^2.1.0", - "memdown": "^6.0.0", - "ncp": "^2.0.0", "rimraf": "^3.0.0", - "sinon": "^10.0.0", + "sinon": "^11.1.1", "url": "^0.11.0", "util": "^0.12.3" }, "dependencies": { "bytes": "^3.1.0", "cborg": "^1.3.4", - "datastore-core": "^4.0.0", - "datastore-fs": "^4.0.0", - "datastore-level": "^5.0.0", "debug": "^4.1.0", "err-code": "^3.0.1", - "interface-blockstore": "^0.0.5", - "interface-datastore": "^4.0.2", + "interface-blockstore": "^0.1.0", + "interface-datastore": "^5.0.0", "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats", - "ipfs-utils": "^7.0.0", "it-filter": "^1.0.2", "it-map": "^1.0.5", + "it-merge": "^1.0.2", + "it-parallel-batch": "^1.0.9", + "it-pipe": "^1.1.0", "it-pushable": "^1.4.0", "just-safe-get": "^2.0.0", "just-safe-set": "^2.1.0", "merge-options": "^3.0.4", + "mortice": "^2.0.1", "multiformats": "^9.0.4", "p-queue": "^6.0.0", "proper-lockfile": "^4.0.0", From d8c46ece8550b008256a03a2b71ce43c6c991cf4 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 29 Jun 2021 12:11:53 +0100 Subject: [PATCH 13/25] feat: accept instances of datastore/blockstores instead of controlling their lifecycles --- src/backends.js | 25 -- src/blockstore.js | 185 ----------- src/default-options-browser.js | 42 --- src/default-options.browser.js | 9 + src/default-options.js | 23 +- src/gc.js | 159 +++++++++ src/index.js | 130 ++++---- src/{lock.js => locks/fs.js} | 6 +- src/{lock-memory.js => locks/memory.js} | 6 +- src/pins.js | 99 +----- src/types.d.ts | 142 +++++++- .../blockstore.js} | 23 +- src/utils/walk-dag.js | 84 +++++ test/blockstore-test.js | 127 ++++---- test/browser.js | 20 +- test/config-test.js | 3 +- test/datastore-test.js | 6 +- test/fixtures/create-backend.js | 19 ++ test/interop-test.js | 40 --- test/is-initialized.js | 12 +- test/keystore-test.js | 6 +- test/lock-test.js | 14 +- test/migrations-test.js | 28 +- test/node.js | 98 ++---- test/options-test.js | 50 +-- test/pins-test.js | 6 +- test/repo-test.js | 163 ++++----- test/stat-test.js | 3 +- test/test-repo/api | 1 - ...7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data | 56 ---- ...Y7SYTVOFIEO5657YM3T6DTHFRQYSUZAGRLEPY.data | Bin 10933 -> 0 bytes ...LPIMICMCRBABQCR6FC5CFHLZV74LDT2PZIFJQ.data | 3 - ...AY63YIR5EM5R4TXRVPZC2BWPOB5FNVRYEXG7I.data | 308 ------------------ ...H2DGUZIRE66VZHMPSOBVY2YU67EWMSHH4CI7Y.data | 93 ------ ...AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data | 8 - ...HY5SBVURQIA4LDBBVVTHZMZE5KPUQSVNVHJAQ.data | 3 - ...QW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data | 3 - ...TJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data | 9 - ...RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data | 115 ------- ...JZA2SYY7OGCSX6FRSIZS2VQQPVKOA2Z4VXN2I.data | Bin 10765 -> 0 bytes ...OUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data | 27 -- ...57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data | 0 ...JOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data | 28 -- ...JYCKHWEL3A6UHYWGT2SVEOIQYOVOC2RMJBRQQ.data | Bin 355 -> 0 bytes ...TUPL3YPUYSLNHT5HJ2USSAZZO5GUTAVSYQRYY.data | Bin 10849 -> 0 bytes ...S65DFU3KLKFK6EJVAIKNVWIDKR3UCGX2LOSBI.data | 3 - test/test-repo/blocks/SHARDING | 1 - ...DYQYKYYGMCX2ISX3WLLY4UJQTVIWSCB5AUU7A.data | Bin 10891 -> 0 bytes ...IHF64AEZZDKWGGUVG7JNRNTYBQJ4ALX6JGV2Q.data | 3 - ...TN55TBQT5H7T37KCTQMVM6Z6IOJPWVPPYHV3Q.data | 3 - ...YGP4F6LU5WSQKMNZHPN6IGKKQU62FWLSH7VPY.data | Bin 10807 -> 0 bytes ...NXKL3LKJHZWQMXKJO7ZJOGM6MYLW3OGOLIWSA.data | 3 - ...2BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data | 2 - test/test-repo/blocks/_README | 30 -- test/test-repo/config | 99 ------ test/test-repo/datastore/000051.log | Bin 30167 -> 0 
bytes test/test-repo/datastore/000053.ldb | Bin 1167 -> 0 bytes test/test-repo/datastore/CURRENT | 1 - test/test-repo/datastore/LOCK | 0 test/test-repo/datastore/LOG | 233 ------------- test/test-repo/datastore/MANIFEST-000052 | Bin 592 -> 0 bytes test/test-repo/datastore_spec | 1 - test/test-repo/version | 1 - tsconfig.json | 1 - types/merge-options/index.d.ts | 2 +- 65 files changed, 726 insertions(+), 1839 deletions(-) delete mode 100644 src/backends.js delete mode 100644 src/blockstore.js delete mode 100644 src/default-options-browser.js create mode 100644 src/default-options.browser.js create mode 100644 src/gc.js rename src/{lock.js => locks/fs.js} (91%) rename src/{lock-memory.js => locks/memory.js} (86%) rename src/{blockstore-utils.js => utils/blockstore.js} (69%) create mode 100644 src/utils/walk-dag.js create mode 100644 test/fixtures/create-backend.js delete mode 100644 test/interop-test.js delete mode 100644 test/test-repo/api delete mode 100644 test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data delete mode 100644 test/test-repo/blocks/EP/CIQOEA4URV75WXHSIBY7SYTVOFIEO5657YM3T6DTHFRQYSUZAGRLEPY.data delete mode 100644 test/test-repo/blocks/FJ/CIQMSSWEB56MYVN7BGLPIMICMCRBABQCR6FC5CFHLZV74LDT2PZIFJQ.data delete mode 100644 test/test-repo/blocks/G7/CIQCDCXJ6563LCORPOAY63YIR5EM5R4TXRVPZC2BWPOB5FNVRYEXG7I.data delete mode 100644 test/test-repo/blocks/I7/CIQGE2VHIPKM4NFWKSH2DGUZIRE66VZHMPSOBVY2YU67EWMSHH4CI7Y.data delete mode 100644 test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data delete mode 100644 test/test-repo/blocks/JA/CIQHS34HCZMO72ZWZBHY5SBVURQIA4LDBBVVTHZMZE5KPUQSVNVHJAQ.data delete mode 100644 test/test-repo/blocks/JN/CIQPHMHGQLLZXC32FQQW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data delete mode 100644 test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data delete mode 100644 test/test-repo/blocks/M4/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data delete mode 100644 test/test-repo/blocks/N2/CIQDWKPBHXLJ3XVELRJZA2SYY7OGCSX6FRSIZS2VQQPVKOA2Z4VXN2I.data delete mode 100644 test/test-repo/blocks/OO/CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data delete mode 100644 test/test-repo/blocks/QV/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data delete mode 100644 test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data delete mode 100644 test/test-repo/blocks/RQ/CIQGPZHNWKMUTP4X4VJYCKHWEL3A6UHYWGT2SVEOIQYOVOC2RMJBRQQ.data delete mode 100644 test/test-repo/blocks/RY/CIQKVNGIGQHPMLDVIATUPL3YPUYSLNHT5HJ2USSAZZO5GUTAVSYQRYY.data delete mode 100644 test/test-repo/blocks/SB/CIQMNAV4TX2DAU5DPES65DFU3KLKFK6EJVAIKNVWIDKR3UCGX2LOSBI.data delete mode 100644 test/test-repo/blocks/SHARDING delete mode 100644 test/test-repo/blocks/U7/CIQF5KHIGUT3ZN2UFEDYQYKYYGMCX2ISX3WLLY4UJQTVIWSCB5AUU7A.data delete mode 100644 test/test-repo/blocks/V2/CIQFIFKJWK4S7LN2N6IHF64AEZZDKWGGUVG7JNRNTYBQJ4ALX6JGV2Q.data delete mode 100644 test/test-repo/blocks/V3/CIQJEFSTFKB33VFP24TN55TBQT5H7T37KCTQMVM6Z6IOJPWVPPYHV3Q.data delete mode 100644 test/test-repo/blocks/VP/CIQL7ROGT73FOVB23WYGP4F6LU5WSQKMNZHPN6IGKKQU62FWLSH7VPY.data delete mode 100644 test/test-repo/blocks/WS/CIQDIIAHGSIWR6TXBONXKL3LKJHZWQMXKJO7ZJOGM6MYLW3OGOLIWSA.data delete mode 100644 test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data delete mode 100644 test/test-repo/blocks/_README delete mode 100644 test/test-repo/config delete mode 100644 
test/test-repo/datastore/000051.log delete mode 100644 test/test-repo/datastore/000053.ldb delete mode 100644 test/test-repo/datastore/CURRENT delete mode 100644 test/test-repo/datastore/LOCK delete mode 100644 test/test-repo/datastore/LOG delete mode 100644 test/test-repo/datastore/MANIFEST-000052 delete mode 100644 test/test-repo/datastore_spec delete mode 100644 test/test-repo/version diff --git a/src/backends.js b/src/backends.js deleted file mode 100644 index f711d448..00000000 --- a/src/backends.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -/** - * @typedef {import('interface-datastore').Datastore} Datastore - * @typedef {import('./types').Backends} Backends - * @typedef {Required} Options - */ - -/** - * - * @param {Backends} name - * @param {string} path - * @param {Options} options - * @returns {Datastore} - */ -function createBackend (name, path, options) { - const Ctor = options.storageBackends[name] - const backendOptions = Object.assign({}, options.storageBackendOptions[name] || {}) - // @ts-ignore we don't have a signature for the constructor - return new Ctor(path, backendOptions) -} - -module.exports = { - create: createBackend -} diff --git a/src/blockstore.js b/src/blockstore.js deleted file mode 100644 index 2a3aecda..00000000 --- a/src/blockstore.js +++ /dev/null @@ -1,185 +0,0 @@ -'use strict' - -const { shard, ShardingDatastore } = require('datastore-core') -const { cidToKey, keyToCid } = require('./blockstore-utils') -const drain = require('it-drain') -const pushable = require('it-pushable') - -/** - * @typedef {import('multiformats/cid').CID} CID - * @typedef {import('interface-datastore').Datastore} Datastore - * @typedef {import('interface-store').Options} DatastoreOptions - * @typedef {import('interface-blockstore').Blockstore} Blockstore - */ - -/** - * - * @param {Datastore} filestore - * @param {*} options - */ -module.exports = (filestore, options) => { - const store = maybeWithSharding(filestore, options) - return createBaseStore(store) -} - -/** - * @param {Datastore} filestore - * @param {{ sharding: any; }} options - */ -function maybeWithSharding (filestore, options) { - if (options.sharding) { - return new ShardingDatastore(filestore, new shard.NextToLast(2)) - } - return filestore -} - -/** - * @param {Datastore | ShardingDatastore} store - * @returns {Blockstore} - */ -function createBaseStore (store) { - /** @type {Blockstore} */ - const bs = { - open () { - return store.open() - }, - - close () { - return store.close() - }, - - async * query (query, options) { - /** @type {import('interface-datastore').Query} */ - const storeQuery = { - prefix: query.prefix, - limit: query.limit, - offset: query.offset, - filters: (query.filters || []).map(filter => { - return ({ key, value }) => filter({ key: keyToCid(key), value }) - }), - orders: (query.orders || []).map(order => { - return (a, b) => order({ key: keyToCid(a.key), value: a.value }, { key: keyToCid(b.key), value: b.value }) - }) - } - - for await (const { key, value } of store.query(storeQuery, options)) { - yield { key: keyToCid(key), value } - } - }, - - async * queryKeys (query, options) { - /** @type {import('interface-datastore').KeyQuery} */ - const storeQuery = { - prefix: query.prefix, - limit: query.limit, - offset: query.offset, - filters: (query.filters || []).map(filter => { - return (key) => filter(keyToCid(key)) - }), - orders: (query.orders || []).map(order => { - return (a, b) => order(keyToCid(a), keyToCid(b)) - }) - } - - for await (const key of store.queryKeys(storeQuery, 
options)) { - yield keyToCid(key) - } - }, - - async get (cid, options) { - return store.get(cidToKey(cid), options) - }, - - async * getMany (cids, options) { - for await (const cid of cids) { - yield this.get(cid, options) - } - }, - - async put (cid, buf, options) { - const key = cidToKey(cid) - const exists = await store.has(key, options) - - if (!exists) { - await store.put(key, buf, options) - } - }, - - async * putMany (pairs, options) { // eslint-disable-line require-await - // we cannot simply chain to `store.putMany` because we convert a CID into - // a key based on the multihash only, so we lose the version & codec and - // cannot give the user back the CID they used to create the block, so yield - // to `store.putMany` but return the actual block the user passed in. - // - // nb. we want to use `store.putMany` here so bitswap can control batching - // up block HAVEs to send to the network - if we use multiple `store.put`s - // it will not be able to guess we are about to `store.put` more blocks - const output = pushable() - - // process.nextTick runs on the microtask queue, setImmediate runs on the next - // event loop iteration so is slower. Use process.nextTick if it is available. - const runner = process && process.nextTick ? process.nextTick : setImmediate - - runner(async () => { - try { - await drain(store.putMany(async function * () { - for await (const { key: cid, value } of pairs) { - const key = cidToKey(cid) - const exists = await store.has(key, options) - - if (!exists) { - yield { key, value } - } - - // there is an assumption here that after the yield has completed - // the underlying datastore has finished writing the block - output.push({ key: cid, value }) - } - }())) - - output.end() - } catch (err) { - output.end(err) - } - }) - - yield * output - }, - - has (cid, options) { - return store.has(cidToKey(cid), options) - }, - - delete (cid, options) { - return store.delete(cidToKey(cid), options) - }, - - deleteMany (cids, options) { - const out = pushable() - - drain(store.deleteMany((async function * () { - for await (const cid of cids) { - yield cidToKey(cid) - - out.push(cid) - } - - out.end() - }()), options)).catch(err => { - out.end(err) - }) - - return out - }, - - batch () { - return { - put (key, value) { }, - delete (key) { }, - commit: async (options) => { } - } - } - } - - return bs -} diff --git a/src/default-options-browser.js b/src/default-options-browser.js deleted file mode 100644 index 8914543c..00000000 --- a/src/default-options-browser.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -// Default configuration for a repo in the browser -module.exports = { - autoMigrate: true, - onMigrationProgress: () => {}, - lock: 'memory', - storageBackends: { - root: require('datastore-level'), - blocks: require('datastore-level'), - keys: require('datastore-level'), - datastore: require('datastore-level'), - pins: require('datastore-level') - }, - storageBackendOptions: { - root: { - extension: '', - prefix: '', - version: 2 - }, - blocks: { - sharding: false, - prefix: '', - version: 2 - }, - keys: { - sharding: false, - prefix: '', - version: 2 - }, - datastore: { - sharding: false, - prefix: '', - version: 2 - }, - pins: { - sharding: false, - prefix: '', - version: 2 - } - } -} diff --git a/src/default-options.browser.js b/src/default-options.browser.js new file mode 100644 index 00000000..fcf8fdaf --- /dev/null +++ b/src/default-options.browser.js @@ -0,0 +1,9 @@ +'use strict' + +// Default configuration for a repo in the browser +module.exports = 
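For context on what is being removed: the deleted `putMany` used the it-pushable/it-drain pattern sketched below, letting the datastore batch writes while results stream back to the caller as each write lands. This is a standalone illustration, not the deleted code itself.

    const pushable = require('it-pushable')
    const drain = require('it-drain')

    const output = pushable()

    drain((async function * () {
      for (const value of [1, 2, 3]) {
        yield value        // hand the value to the underlying store
        output.push(value) // ...then report it back to the consumer
      }
      output.end()
    })()).catch(err => output.end(err))

    // consumer side: for await (const value of output) { ... }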
{ + autoMigrate: true, + onMigrationProgress: () => {}, + repoOwner: true, + repoLock: require('./locks/memory') +} diff --git a/src/default-options.js b/src/default-options.js index cce52de5..a09146d7 100644 --- a/src/default-options.js +++ b/src/default-options.js @@ -3,30 +3,13 @@ // Default configuration for a repo in node.js /** - * @type {Required} + * @type {Partial} */ const defaultOptions = { autoMigrate: true, onMigrationProgress: () => {}, - lock: 'fs', - storageBackends: { - root: require('datastore-fs'), - blocks: require('datastore-fs'), - keys: require('datastore-fs'), - datastore: require('datastore-level'), - pins: require('datastore-level') - }, - storageBackendOptions: { - root: { - extension: '' - }, - blocks: { - sharding: true, - extension: '.data' - }, - keys: { - } - } + repoOwner: true, + repoLock: require('./locks/fs') } module.exports = defaultOptions diff --git a/src/gc.js b/src/gc.js new file mode 100644 index 00000000..727f1ed0 --- /dev/null +++ b/src/gc.js @@ -0,0 +1,159 @@ +'use strict' + +const { CID } = require('multiformats/cid') +const log = require('debug')('ipfs:repo:gc') +const { Errors } = require('interface-datastore') +const ERR_NOT_FOUND = Errors.notFoundError().code +const parallelBatch = require('it-parallel-batch') +const { pipe } = require('it-pipe') +const merge = require('it-merge') +const map = require('it-map') +const filter = require('it-filter') +const { Key } = require('interface-datastore') +const { base32 } = require('multiformats/bases/base32') +const walkDag = require('./utils/walk-dag') + +// Limit on the number of parallel block remove operations +const BLOCK_RM_CONCURRENCY = 256 + +const MFS_ROOT_KEY = new Key('/local/filesroot') + +/** + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('./types').loadCodec} loadCodec + */ + +/** + * Perform mark and sweep garbage collection + * + * @param {object} config + * @param {import('./types').GCLock} config.gcLock + * @param {import('./pins')} config.pins + * @param {Blockstore} config.blockstore + * @param {import('interface-datastore').Datastore} config.root + * @param {loadCodec} config.loadCodec + */ +module.exports = ({ gcLock, pins, blockstore, root, loadCodec }) => { + async function * gc () { + const start = Date.now() + log('Creating set of marked blocks') + + const release = await gcLock.writeLock() + + try { + // Mark all blocks that are being used + const markedSet = await createMarkedSet({ pins, blockstore, root, loadCodec }) + // Get all blocks keys from the blockstore + const blockKeys = blockstore.queryKeys({}) + + // Delete blocks that are not being used + yield * deleteUnmarkedBlocks({ blockstore }, markedSet, blockKeys) + + log(`Complete (${Date.now() - start}ms)`) + } finally { + release() + } + } + + return gc +} + +/** + * Get Set of CIDs of blocks to keep + * + * @param {object} config + * @param {import('./pins')} config.pins + * @param {import('interface-blockstore').Blockstore} config.blockstore + * @param {import('interface-datastore').Datastore} config.root + * @param {loadCodec} config.loadCodec + */ +async function createMarkedSet ({ pins, blockstore, loadCodec, root }) { + const mfsSource = (async function * () { + let mh + try { + mh = await root.get(MFS_ROOT_KEY) + } catch (err) { + if (err.code === ERR_NOT_FOUND) { + log('No blocks in MFS') + return + } + + throw err + } + + const rootCid = CID.decode(mh) + yield rootCid + yield * walkDag(rootCid, blockstore, loadCodec) + })() + + const pinsSource = merge( + 
map(pins.recursiveKeys(), ({ cid }) => cid), + pins.indirectKeys(), + map(pins.directKeys(), ({ cid }) => cid), + mfsSource + ) + + const output = new Set() + + for await (const cid of merge(pinsSource, mfsSource)) { + output.add(base32.encode(cid.multihash.bytes)) + } + + return output +} + +/** + * Delete all blocks that are not marked as in use + * + * @param {object} arg + * @param {Blockstore} arg.blockstore + * @param {Set} markedSet + * @param {AsyncIterable} blockKeys + */ +async function * deleteUnmarkedBlocks ({ blockstore }, markedSet, blockKeys) { + // Iterate through all blocks and find those that are not in the marked set + // blockKeys yields { key: Key() } + let blocksCount = 0 + let removedBlocksCount = 0 + + /** + * @param {CID} cid + */ + const removeBlock = async (cid) => { + return async function remove () { + blocksCount++ + + try { + const b32 = base32.encode(cid.multihash.bytes) + + if (markedSet.has(b32)) { + return null + } + + try { + await blockstore.delete(cid) + removedBlocksCount++ + } catch (err) { + return { + err: new Error(`Could not delete block with CID ${cid}: ${err.message}`) + } + } + + return { cid } + } catch (err) { + const msg = `Could delete block with CID ${cid}` + log(msg, err) + return { err: new Error(msg + `: ${err.message}`) } + } + } + } + + yield * pipe( + parallelBatch(map(blockKeys, removeBlock), BLOCK_RM_CONCURRENCY), + // filter nulls (blocks that were retained) + source => filter(source, Boolean) + ) + + log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blocksCount} blocks. ` + + `Deleted ${removedBlocksCount} blocks.`) +} diff --git a/src/index.js b/src/index.js index ba89f011..c016649e 100644 --- a/src/index.js +++ b/src/index.js @@ -5,54 +5,53 @@ const debug = require('debug') const errCode = require('err-code') const migrator = require('ipfs-repo-migrations') const bytes = require('bytes') -const pathJoin = require('ipfs-utils/src/path-join') const merge = require('merge-options') const constants = require('./constants') -const backends = require('./backends') const version = require('./version') const config = require('./config') const spec = require('./spec') const apiAddr = require('./api-addr') -const createBlockstore = require('./blockstore') const createIdstore = require('./idstore') const defaultOptions = require('./default-options') const defaultDatastore = require('./default-datastore') const ERRORS = require('./errors') const Pins = require('./pins') const createPinnedBlockstore = require('./pinned-blockstore') +// @ts-ignore - no types +const mortice = require('mortice') +const gc = require('./gc') +const MemoryLock = require('./locks/memory') +const FSLock = require('./locks/fs') const log = debug('ipfs:repo') const noLimit = Number.MAX_SAFE_INTEGER const AUTO_MIGRATE_CONFIG_KEY = 'repoAutoMigrate' -/** @type {Record} */ -const lockers = { - memory: require('./lock-memory'), - fs: require('./lock') -} - /** * @typedef {import('./types').Options} Options - * @typedef {import('./types').Lock} Lock + * @typedef {import('./types').RepoLock} RepoLock * @typedef {import('./types').LockCloser} LockCloser + * @typedef {import('./types').GCLock} GCLock * @typedef {import('./types').Stat} Stat * @typedef {import('./types').Config} Config * @typedef {import('interface-datastore').Datastore} Datastore * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('./types').Backends} Backends */ /** - * IpfsRepo implements all required functionality to read and write to an ipfs repo. 
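A usage sketch for the generator returned above (hypothetical `repo`): each yielded result is either `{ cid }` for a removed block or `{ err }` when a delete failed.

    async function runGc (repo) {
      for await (const result of repo.gc()) {
        if (result.err) {
          console.error(result.err.message)
        } else {
          console.info('removed', result.cid.toString())
        }
      }
    }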
+ * IPFSRepo implements all required functionality to read and write to an ipfs repo. */ -class IpfsRepo { +class Repo { /** - * @param {string} repoPath - path where the repo is stored + * @param {string} path - Where this repo is stored * @param {import('./types').loadCodec} loadCodec - a function that will load multiformat block codecs - * @param {Options} [options] - Configuration + * @param {Backends} backends - backends used by this repo + * @param {Partial} [options] - Configuration */ - constructor (repoPath, loadCodec, options) { - if (typeof repoPath !== 'string') { + constructor (path, loadCodec, backends, options) { + if (typeof path !== 'string') { throw new Error('missing repo path') } @@ -60,21 +59,16 @@ class IpfsRepo { throw new Error('missing codec loader') } + /** @type {Options} */ this.options = merge(defaultOptions, options) this.closed = true - this.path = repoPath - - /** - * @private - */ - this._locker = this._getLocker() - this.root = backends.create('root', this.path, this.options) - this.datastore = backends.create('datastore', pathJoin(this.path, 'datastore'), this.options) - this.keys = backends.create('keys', pathJoin(this.path, 'keys'), this.options) + this.path = path + this.root = backends.root + this.datastore = backends.datastore + this.keys = backends.keys - const blocksBaseStore = backends.create('blocks', pathJoin(this.path, 'blocks'), this.options) - const blockstore = createBlockstore(blocksBaseStore, this.options.storageBackendOptions.blocks) - const pinstore = backends.create('pins', pathJoin(this.path, 'pins'), this.options) + const blockstore = backends.blocks + const pinstore = backends.pins this.pins = new Pins({ pinstore, blockstore, loadCodec }) @@ -88,6 +82,13 @@ class IpfsRepo { this.config = config(this.root) this.spec = spec(this.root) this.apiAddr = apiAddr(this.root) + + /** @type {GCLock} */ + this.gcLock = mortice(path, { + singleProcess: this.options.repoOwner !== false + }) + + this.gc = gc({ gcLock: this.gcLock, pins: this.pins, blockstore: this.blocks, root: this.root, loadCodec }) } /** @@ -145,14 +146,21 @@ class IpfsRepo { try { await this._openRoot() await this._checkInitialized() - this.lockfile = await this._openLock(this.path) + + this._lockfile = await this._openLock() log('acquired repo.lock') const isCompatible = await this.version.check(constants.repoVersion) if (!isCompatible) { if (await this._isAutoMigrationEnabled()) { - await this._migrate(constants.repoVersion) + await this._migrate(constants.repoVersion, { + root: this.root, + datastore: this.datastore, + pins: this.pins.pinstore, + blocks: this.pins.blockstore, + keys: this.keys + }) } else { throw new ERRORS.InvalidRepoVersionError('Incompatible repo versions. Automatic migrations disabled. Please migrate the repo manually.') } @@ -173,10 +181,10 @@ class IpfsRepo { this.closed = false log('all opened') } catch (err) { - if (this.lockfile) { + if (this._lockfile) { try { await this._closeLock() - this.lockfile = null + this._lockfile = null } catch (err2) { log('error removing lock', err2) } @@ -186,25 +194,6 @@ class IpfsRepo { } } - /** - * Returns the repo locker to be used. 
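Since callers now own the backend lifecycles, a hedged construction sketch follows; the store choices are illustrative and `exampleBlockstore` is assumed to be any interface-blockstore implementation (the tests use blockstore-datastore-adapter for this).

    const { createRepo } = require('ipfs-repo')
    const { MemoryDatastore } = require('interface-datastore')
    const dagPb = require('@ipld/dag-pb')

    // minimal loader: dag-pb only, enough for default (CIDv0) pins
    const loadCodec = (codeOrName) => codeOrName === dagPb.code || codeOrName === dagPb.name
      ? Promise.resolve(dagPb)
      : Promise.reject(new Error(`unknown codec ${codeOrName}`))

    const repo = createRepo('/tmp/example-repo', loadCodec, {
      root: new MemoryDatastore(),
      blocks: exampleBlockstore, // assumed: an interface-blockstore instance
      keys: new MemoryDatastore(),
      datastore: new MemoryDatastore(),
      pins: new MemoryDatastore()
    })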
- *
-   * @private
-   */
-  _getLocker () {
-    if (typeof this.options.lock === 'string') {
-      if (!lockers[this.options.lock]) {
-        throw new Error('Unknown lock type: ' + this.options.lock)
-      }
-      return lockers[this.options.lock]
-    }
-
-    if (!this.options.lock) {
-      throw new Error('No lock provided')
-    }
-    return this.options.lock
-  }
-
  /**
   * Opens the root backend, catching and ignoring an 'Already open' error
   *
@@ -225,11 +214,10 @@ class IpfsRepo {
   * be returned in the callback if one has been created.
   *
   * @private
-   * @param {string} path
   * @returns {Promise<LockCloser>}
   */
-  async _openLock (path) {
-    const lockfile = await this._locker.lock(path)
+  async _openLock () {
+    const lockfile = await this.options.repoLock.lock(this.path)
 
    if (typeof lockfile.close !== 'function') {
      throw errCode(new Error('Locks must have a close method'), 'ERR_NO_CLOSE_FUNCTION')
@@ -244,7 +232,7 @@ class IpfsRepo {
   * @private
   */
  _closeLock () {
-    return this.lockfile && this.lockfile.close()
+    return this._lockfile && this._lockfile.close()
  }
 
  /**
@@ -377,19 +365,20 @@ class IpfsRepo {
   *
   * @private
   * @param {number} toVersion
+   * @param {Backends} backends
   */
-  async _migrate (toVersion) {
+  async _migrate (toVersion, backends) {
    const currentRepoVersion = await this.version.get()
 
    if (currentRepoVersion > toVersion) {
      log(`reverting to version ${toVersion}`)
-      return migrator.revert(this.path, this.options, toVersion, {
+      return migrator.revert(this.path, backends, this.options, toVersion, {
        ignoreLock: true,
        onProgress: this.options.onMigrationProgress
      })
    } else {
      log(`migrating to version ${toVersion}`)
-      return migrator.migrate(this.path, this.options, toVersion, {
+      return migrator.migrate(this.path, backends, this.options, toVersion, {
        ignoreLock: true,
        onProgress: this.options.onMigrationProgress
      })
@@ -439,10 +428,27 @@ async function getSize (datastore) {
  return sum
}
 
-module.exports = IpfsRepo
-module.exports.utils = { blockstore: require('./blockstore-utils') }
-module.exports.repoVersion = constants.repoVersion
-module.exports.errors = ERRORS
+/**
+ * @param {string} path - Where this repo is stored
+ * @param {import('./types').loadCodec} loadCodec - a function that will load multiformat block codecs
+ * @param {import('./types').Backends} backends - backends used by this repo
+ * @param {Partial<Options>} [options] - Configuration
+ * @returns {import('./types').IPFSRepo}
+ */
+function createRepo (path, loadCodec, backends, options) {
+  return new Repo(path, loadCodec, backends, options)
+}
+
+module.exports = {
+  createRepo,
+  repoVersion: constants.repoVersion,
+  errors: ERRORS,
+  utils: { blockstore: require('./utils/blockstore') },
+  locks: {
+    memory: MemoryLock,
+    fs: FSLock
+  }
+}
 
/**
 * @param {import('./types').Config} _config
diff --git a/src/lock.js b/src/locks/fs.js
similarity index 91%
rename from src/lock.js
rename to src/locks/fs.js
index a505c218..039a6b1d 100644
--- a/src/lock.js
+++ b/src/locks/fs.js
@@ -1,15 +1,15 @@
 'use strict'
 
-const { LockExistsError } = require('./errors')
+const { LockExistsError } = require('../errors')
 const path = require('path')
 const debug = require('debug')
 const { lock: properLock, check } = require('proper-lockfile')
 
-const log = debug('ipfs:repo:lock')
+const log = debug('ipfs:repo:lock:fs')
 
 const lockFile = 'repo.lock'
 
 /**
- * @typedef {import('./types').LockCloser} LockCloser
+ * @typedef {import('../types').LockCloser} LockCloser
 */
 
 /**
diff --git a/src/lock-memory.js b/src/locks/memory.js
similarity index 86%
rename from src/lock-memory.js
rename to
src/locks/memory.js index f2047857..e28c2c75 100644 --- a/src/lock-memory.js +++ b/src/locks/memory.js @@ -1,9 +1,9 @@ 'use strict' -const errors = require('./errors') +const errors = require('../errors') const debug = require('debug') -const log = debug('ipfs:repo:lock') +const log = debug('ipfs:repo:lock:memory') const lockFile = 'repo.lock' @@ -11,7 +11,7 @@ const lockFile = 'repo.lock' const LOCKS = {} /** - * @typedef {import('./types').LockCloser} LockCloser + * @typedef {import('../types').LockCloser} LockCloser */ /** diff --git a/src/pins.js b/src/pins.js index 2471063a..355632cc 100644 --- a/src/pins.js +++ b/src/pins.js @@ -5,13 +5,14 @@ const { CID } = require('multiformats/cid') const errCode = require('err-code') const debug = require('debug') const first = require('it-first') -const all = require('it-all') +const drain = require('it-drain') const cborg = require('cborg') -const { Key } = require('interface-datastore') const dagPb = require('@ipld/dag-pb') -const dagCbor = require('@ipld/dag-cbor') -const { base32 } = require('multiformats/bases/base32') -const Digest = require('multiformats/hashes/digest') +const { + cidToKey, + keyToMultihash +} = require('./utils/blockstore') +const walkDag = require('./utils/walk-dag') /** * @typedef {object} Pin @@ -42,60 +43,6 @@ function invalidPinTypeErr (type) { return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') } -/** - * @param {CID} cid - */ -function cidToKey (cid) { - return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase().substring(1)}`) -} - -/** - * @param {Key | string} key - */ -function keyToMultihash (key) { - return Digest.decode(base32.decode(`b${key.toString().toLowerCase().substring(1)}`)) -} - -// eslint-disable-next-line jsdoc/require-returns-check -/** - * @param {any} obj - * @param {string[]} path - * @param {boolean} parseBuffer - * @returns {Generator<[string, CID], void, undefined>} - */ -function * dagCborLinks (obj, path = [], parseBuffer = true) { - if (parseBuffer && Buffer.isBuffer(obj)) { - obj = cborg.decode(obj) - } - - for (const key of Object.keys(obj)) { - const _path = path.slice() - _path.push(key) - const val = obj[key] - - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0; i < val.length; i++) { - const __path = _path.slice() - __path.push(i.toString()) - const o = val[i] - if (CID.isCID(o)) { // eslint-disable-line max-depth - yield [__path.join('/'), o] - } else if (typeof o === 'object') { - yield * dagCborLinks(o, _path, false) - } - } - } else { - if (CID.isCID(val)) { - yield [_path.join('/'), val] - } else { - yield * dagCborLinks(val, _path, false) - } - } - } - } -} - const PinTypes = { /** @type {'direct'} */ direct: ('direct'), @@ -123,34 +70,6 @@ class Pins { this.recursivePins = new Set() } - /** - * @private - * @param {CID} cid - * @param {AbortOptions} [options] - * @returns {AsyncGenerator} - */ - async * _walkDag (cid, options) { - try { - const block = await this.blockstore.get(cid, options) - const codec = await this.loadCodec(cid.code) - const node = codec.decode(block) - - if (cid.code === dagPb.code) { - for (const link of node.Links) { - yield link.Hash - yield * this._walkDag(link.Hash, options) - } - } else if (cid.code === dagCbor.code) { - for (const [, childCid] of dagCborLinks(node)) { - yield childCid - yield * this._walkDag(childCid, options) - } - } - } catch (err) { - this.log('Could not walk DAG for CID', cid.toString(), err) - } - } - /** * @param {CID} cid * @param {PinOptions & AbortOptions} [options] @@ 
-267,7 +186,7 @@ class Pins {
   */
  async * indirectKeys (options) {
    for await (const { cid } of this.recursiveKeys()) {
-      for await (const childCid of this._walkDag(cid, options)) {
+      for await (const childCid of walkDag(cid, this.blockstore, this.loadCodec, options)) {
        // recursive pins override indirect pins
        const types = [
          PinTypes.recursive
@@ -334,7 +253,7 @@
     */
    async function * findChild (key, source) {
      for await (const { cid: parentCid } of source) {
-        for await (const childCid of self._walkDag(parentCid)) {
+        for await (const childCid of walkDag(parentCid, self.blockstore, self.loadCodec)) {
          if (childCid.equals(key)) {
            yield parentCid
            return
@@ -370,7 +289,7 @@
   * @param {AbortOptions} options
   */
  async fetchCompleteDag (cid, options) {
-    await all(this._walkDag(cid, options))
+    await drain(walkDag(cid, this.blockstore, this.loadCodec, options))
  }
 
  /**
diff --git a/src/types.d.ts b/src/types.d.ts
index 4eaf91ec..304a3da2 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -1,5 +1,6 @@
 
 import type { Datastore } from 'interface-datastore'
+import type { Blockstore } from 'interface-blockstore'
 import type { CID } from 'multiformats'
 import type { BlockCodec } from 'multiformats/codecs/interface'
 
@@ -10,41 +11,150 @@ export interface Options {
  /**
   * Controls automatic migrations of repository. (defaults: true)
   */
-  autoMigrate?: boolean
+  autoMigrate: boolean
 
  /**
   * Callback function to be notified of migration progress
   */
-  onMigrationProgress?: (version: number, percentComplete: string, message: string) => void
+  onMigrationProgress: (version: number, percentComplete: string, message: string) => void
+
+  /**
+   * If multiple processes are accessing the same repo - e.g. via node cluster or browser UI and webworkers
+   * one instance must be designated the repo owner to hold the lock on shared resources like the datastore.
+   *
+   * Set this property to true on one instance only if this is how your application is set up.
+   */
+  repoOwner: boolean
+
+  /**
+   * A lock implementation that prevents multiple processes accessing the same repo
+   */
+  repoLock: RepoLock
+}
+
+export interface IPFSRepo {
+  closed: boolean;
+  path: string;
+  root: Datastore;
+  datastore: Datastore;
+  keys: Datastore;
+  pins: Pins;
+  blocks: Blockstore;
+
+  version: {
+    exists(): Promise<boolean>;
+    get(): Promise<number>;
+    set(version: number): Promise<void>;
+    check(expected: number): Promise<boolean>;
+  }
+
+  config: {
+    getAll(options?: {
+      signal?: AbortSignal | undefined;
+    } | undefined): Promise<Config>;
+    get(key: string, options?: {
+      signal?: AbortSignal | undefined;
+    } | undefined): Promise<any>;
+    set(key: string, value?: any, options?: {
+      signal?: AbortSignal | undefined;
+    } | undefined): Promise<void>;
+    replace(value?: import("./types").Config | undefined, options?: {
+      signal?: AbortSignal | undefined;
+    } | undefined): Promise<void>;
+    exists(): Promise<boolean>;
+  }
+
+  spec: {
+    exists(): Promise<boolean>;
+    get(): Promise<any>;
+    set(spec: any): Promise<void>;
+  }
+
+  apiAddr: {
+    get(): Promise<string>;
+    set(value: string): Promise<void>;
+    delete(): Promise<void>;
+  }
+
+  gcLock: GCLock
+
+  gc: () => AsyncGenerator<{
+    err: Error;
+    cid?: undefined;
+  } | {
+    cid: CID;
+    err?: undefined;
+  } | null, void, unknown>
+
+  /**
+   * Initialize a new repo.
+   *
+   * @param {import('./types').Config} config - config to write into `config`.
+   * @returns {Promise<void>}
+   */
+  init(config: Config): Promise<void>
+
  /**
-   * What type of lock to use. Lock has to be acquired when opening.
+   * Check if the repo is already initialized.
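The `walkDag` helper that replaces `_walkDag` above is also usable on its own. A small sketch of traversing a pinned DAG with it, assuming the module path this patch introduces (`countReachable` is illustrative, not part of the patch):

```js
const walkDag = require('./utils/walk-dag')

// Counts the root plus every child CID walkDag can decode, depth-first
async function countReachable (rootCid, blockstore, loadCodec, options) {
  let count = 1

  for await (const childCid of walkDag(rootCid, blockstore, loadCodec, options)) {
    count++ // childCid itself is unused, we only need the tally
  }

  return count
}
```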
+   *
+   * @returns {Promise<boolean>}
   */
-  lock?: Lock | 'fs' | 'memory'
+  isInitialized(): Promise<boolean>
 
  /**
-   * Map for backends and implementation reference.
-   * - `root` (defaults to `datastore-fs` in Node.js and `datastore-level` in the browser)
-   * - `blocks` (defaults to `datastore-fs` in Node.js and `datastore-level` in the browser)
-   * - `keys` (defaults to `datastore-fs` in Node.js and `datastore-level` in the browser)
-   * - `datastore` (defaults to `datastore-level`)
-   * - `pins` (defaults to `datastore-level`)
+   * Open the repo. If the repo is already open an error will be thrown.
+   * If the repo is not initialized it will throw an error.
+   *
+   * @returns {Promise<void>}
   */
-  storageBackends?: Partial<Record<Backends, { new (...args: any[]): Datastore }>>
+  open(): Promise<void>
 
-  storageBackendOptions?: Partial<Record<Backends, unknown>>
+  /**
+   * Close the repo and cleanup.
+   *
+   * @returns {Promise<void>}
+   */
+  close(): Promise<void>
+
+  /**
+   * Check if a repo exists.
+   *
+   * @returns {Promise<boolean>}
+   */
+  exists(): Promise<boolean>
+
+  /**
+   * Get repo status.
+   *
+   * @returns {Promise<Stat>}
+   */
+  stat(): Promise<Stat>
}
 
-export type Backends = 'root' | 'blocks' | 'keys' | 'datastore' | 'pins'
+export interface Backends {
+  root: Datastore
+  blocks: Blockstore
+  keys: Datastore
+  datastore: Datastore
+  pins: Datastore
+}
 
-export interface Lock {
+export interface RepoLock {
  /**
   * Sets the lock if one does not already exist. If a lock already exists, should throw an error.
   */
-  lock: (dir: string) => Promise<LockCloser>
+  lock: (path: string) => Promise<LockCloser>
 
  /**
   * Checks the existence of the lock.
   */
-  locked: (dir: string) => Promise<boolean>
+  locked: (path: string) => Promise<boolean>
+}
+
+export type ReleaseLock = () => void
+
+export interface GCLock {
+  readLock: () => Promise<ReleaseLock>
+  writeLock: () => Promise<ReleaseLock>
}
 
export interface LockCloser {
diff --git a/src/blockstore-utils.js b/src/utils/blockstore.js
similarity index 69%
rename from src/blockstore-utils.js
rename to src/utils/blockstore.js
index 4c6c29d0..680a8944 100644
--- a/src/blockstore-utils.js
+++ b/src/utils/blockstore.js
@@ -2,18 +2,17 @@
 
 const { Key } = require('interface-datastore')
 const { CID } = require('multiformats')
-const mhd = require('multiformats/hashes/digest')
 const raw = require('multiformats/codecs/raw')
 const errCode = require('err-code')
 const { base32 } = require('multiformats/bases/base32')
+const Digest = require('multiformats/hashes/digest')
 
/**
 * Transform a cid to the appropriate datastore key.
 *
 * @param {CID} c
- * @returns {Key}
 */
-exports.cidToKey = c => {
+function cidToKey (c) {
  const cid = CID.asCID(c)
 
  if (cid == null) {
@@ -31,11 +30,21 @@
 * Hence it is highly probable that a stored CID will differ from the CID retrieved from the blockstore.
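The lossiness described in the comment above is easy to demonstrate: `cidToKey` keeps only the multihash, so `keyToCid` always reconstructs a CIDv1 with the `raw` codec. A sketch, assuming the exports of `src/utils/blockstore.js` from this patch:

```js
const { CID } = require('multiformats/cid')
const { sha256 } = require('multiformats/hashes/sha2')
const dagPb = require('@ipld/dag-pb')
const raw = require('multiformats/codecs/raw')
const { cidToKey, keyToCid } = require('./utils/blockstore')

async function demo () {
  const digest = await sha256.digest(new Uint8Array([0, 1, 2]))
  const stored = CID.createV1(dagPb.code, digest)
  const retrieved = keyToCid(cidToKey(stored))

  // same multihash, but the codec came back as raw...
  console.log(retrieved.code === raw.code) // true
  // ...so the round-tripped CID no longer equals the stored one
  console.log(retrieved.equals(stored)) // false
}

demo().catch(console.error)
```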
*
 * @param {Key} key
- * @returns {CID}
 */
-exports.keyToCid = key => {
+function keyToCid (key) {
  // Block key is of the form /<base32 encoded multihash>
-  const digest = mhd.decode(base32.decode('b' + key.toString().slice(1).toLowerCase()))
+  return CID.createV1(raw.code, keyToMultihash(key))
+}
+
+/**
+ * @param {Key | string} key
+ */
+function keyToMultihash (key) {
+  return Digest.decode(base32.decode(`b${key.toString().toLowerCase().substring(1)}`))
+}
 
-  return CID.createV1(raw.code, digest)
+module.exports = {
+  cidToKey,
+  keyToCid,
+  keyToMultihash
+}
diff --git a/src/utils/walk-dag.js b/src/utils/walk-dag.js
new file mode 100644
index 00000000..c472ce3e
--- /dev/null
+++ b/src/utils/walk-dag.js
@@ -0,0 +1,84 @@
+'use strict'
+
+const { CID } = require('multiformats/cid')
+const cborg = require('cborg')
+const dagPb = require('@ipld/dag-pb')
+const dagCbor = require('@ipld/dag-cbor')
+const log = require('debug')('ipfs:repo:utils:walk-dag')
+
+/**
+ * @typedef {import('interface-blockstore').Blockstore} Blockstore
+ * @typedef {import('../types').loadCodec} loadCodec
+ * @typedef {import('../types').AbortOptions} AbortOptions
+ */
+
+/**
+ * @param {CID} cid
+ * @param {Blockstore} blockstore
+ * @param {loadCodec} loadCodec
+ * @param {AbortOptions} [options]
+ * @returns {AsyncGenerator<CID, void, undefined>}
+ */
+async function * walkDag (cid, blockstore, loadCodec, options) {
+  try {
+    const block = await blockstore.get(cid, options)
+    const codec = await loadCodec(cid.code)
+    const node = codec.decode(block)
+
+    if (cid.code === dagPb.code) {
+      for (const link of node.Links) {
+        yield link.Hash
+        yield * walkDag(link.Hash, blockstore, loadCodec, options)
+      }
+    } else if (cid.code === dagCbor.code) {
+      for (const [, childCid] of dagCborLinks(node)) {
+        yield childCid
+        yield * walkDag(childCid, blockstore, loadCodec, options)
+      }
+    }
+  } catch (err) {
+    log('Could not walk DAG for CID', cid.toString(), err)
+  }
+}
+
+// eslint-disable-next-line jsdoc/require-returns-check
+/**
+ * @param {any} obj
+ * @param {string[]} path
+ * @param {boolean} parseBuffer
+ * @returns {Generator<[string, CID], void, undefined>}
+ */
+function * dagCborLinks (obj, path = [], parseBuffer = true) {
+  if (parseBuffer && Buffer.isBuffer(obj)) {
+    obj = cborg.decode(obj)
+  }
+
+  for (const key of Object.keys(obj)) {
+    const _path = path.slice()
+    _path.push(key)
+    const val = obj[key]
+
+    if (val && typeof val === 'object') {
+      if (Array.isArray(val)) {
+        for (let i = 0; i < val.length; i++) {
+          const __path = _path.slice()
+          __path.push(i.toString())
+          const o = val[i]
+          if (CID.isCID(o)) { // eslint-disable-line max-depth
+            yield [__path.join('/'), o]
+          } else if (typeof o === 'object') {
+            yield * dagCborLinks(o, _path, false)
+          }
+        }
+      } else {
+        if (CID.isCID(val)) {
+          yield [_path.join('/'), val]
+        } else {
+          yield * dagCborLinks(val, _path, false)
+        }
+      }
+    }
+  }
+}
+
+module.exports = walkDag
diff --git a/test/blockstore-test.js b/test/blockstore-test.js
index 2f895bbf..41510e7c 100644
--- a/test/blockstore-test.js
+++ b/test/blockstore-test.js
@@ -6,21 +6,22 @@
 const { expect } = require('aegir/utils/chai')
 const { CID } = require('multiformats')
 const range = require('just-range')
 const tempDir = require('ipfs-utils/src/temp-dir')
-const { cidToKey } = require('../src/blockstore-utils')
-const IPFSRepo = require('../src')
+const { createRepo } = require('../src')
 const drain = require('it-drain')
 const all = require('it-all')
 const first = require('it-first')
 const uint8ArrayFromString = require('uint8arrays/from-string')
 const
uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayEquals = require('uint8arrays/equals') -const { Adapter } = require('interface-datastore') +const { BlockstoreAdapter } = require('interface-blockstore') const { sha256 } = require('multiformats/hashes/sha2') const { identity } = require('multiformats/hashes/identity') const raw = require('multiformats/codecs/raw') const dagCbor = require('@ipld/dag-cbor') const dagPb = require('@ipld/dag-pb') const loadCodec = require('./fixtures/load-codec') +const createBackend = require('./fixtures/create-backend') +const MemoryLock = require('../src/locks/memory') async function makePair () { const data = new TextEncoder().encode(`hello-${Math.random()}`) @@ -33,6 +34,7 @@ async function makePair () { /** * @typedef {import('interface-datastore').Key} Key * @typedef {import('interface-blockstore').Pair} Pair + * @typedef {import('../src/types').IPFSRepo} IPFSRepo */ /** @@ -204,43 +206,35 @@ module.exports = (repo) => { const data = uint8ArrayFromString(`TEST${Date.now()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) - const key = cidToKey(cid) - - otherRepo = new IPFSRepo(tempDir(), loadCodec, { - storageBackends: { - blocks: class ExplodingBlockStore extends Adapter { - /** - * - * @param {Key} c - */ - async get (c) { - if (c.toString() === key.toString()) { - throw err - } - return new Uint8Array() - } - - async open () {} - - async close () {} - } - }, - storageBackendOptions: { - blocks: { - sharding: false + + class ExplodingBlockStore extends BlockstoreAdapter { + /** + * + * @param {CID} c + */ + async get (c) { + if (c.toString() === cid.toString()) { + throw err } + + return new Uint8Array() } + + async open () {} + + async close () {} + } + + otherRepo = createRepo(tempDir(), loadCodec, createBackend({ + blocks: new ExplodingBlockStore() + }), { + repoLock: MemoryLock }) await otherRepo.init({}) await otherRepo.open() - try { - await otherRepo.blocks.get(cid) - throw new Error('Should have thrown') - } catch (err2) { - expect(err2).to.deep.equal(err) - } + await expect(otherRepo.blocks.get(cid)).to.eventually.be.rejectedWith(err) }) it('can load an identity hash without storing first', async () => { @@ -333,51 +327,42 @@ module.exports = (repo) => { const data = uint8ArrayFromString(`TEST${Date.now()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) - const key = cidToKey(cid) - - otherRepo = new IPFSRepo(tempDir(), loadCodec, { - storageBackends: { - blocks: class ExplodingBlockStore extends Adapter { - /** - * @param {Key} c - */ - async get (c) { - if (c.toString() === key.toString()) { - throw err - } - return new Uint8Array() - } - - async open () {} - - async close () {} - - /** - * @param {any} source - */ - async * getMany (source) { - for await (const c of source) { - yield this.get(c) - } - } + + class ExplodingBlockStore extends BlockstoreAdapter { + /** + * @param {CID} c + */ + async get (c) { + if (c.toString() === cid.toString()) { + throw err } - }, - storageBackendOptions: { - blocks: { - sharding: false + return new Uint8Array() + } + + async open () {} + + async close () {} + + /** + * @param {any} source + */ + async * getMany (source) { + for await (const c of source) { + yield this.get(c) } } + } + + otherRepo = createRepo(tempDir(), loadCodec, createBackend({ + blocks: new ExplodingBlockStore() + }), { + repoLock: MemoryLock }) await otherRepo.init({}) await otherRepo.open() - try { - await drain(otherRepo.blocks.getMany([cid])) - throw new 
Error('Should have thrown') - } catch (err2) { - expect(err2).to.deep.equal(err) - } + await expect(drain(otherRepo.blocks.getMany([cid]))).to.eventually.be.rejectedWith(err) }) }) @@ -501,7 +486,7 @@ module.exports = (repo) => { it('returns some of the blocks', async () => { const blocksWithPrefix = await all(repo.blocks.query({ - prefix: cidToKey(pair1.key).toString().substring(0, 10) + prefix: pair1.key.toString().substring(0, 13) })) const block = blocksWithPrefix.find(({ key, value }) => uint8ArrayToString(value, 'base64') === uint8ArrayToString(pair1.value, 'base64')) diff --git a/test/browser.js b/test/browser.js index 7660c76a..f6c0732f 100644 --- a/test/browser.js +++ b/test/browser.js @@ -2,14 +2,22 @@ 'use strict' -const IPFSRepo = require('../src') +const { createRepo } = require('../src') const loadCodec = require('./fixtures/load-codec') +const { MemoryDatastore } = require('interface-datastore') +const { MemoryBlockstore } = require('interface-blockstore') async function createTempRepo (options = {}) { const date = Date.now().toString() const repoPath = 'test-repo-for-' + date - const repo = new IPFSRepo(repoPath, loadCodec, options) + const repo = createRepo(repoPath, loadCodec, { + blocks: new MemoryBlockstore(), + datastore: new MemoryDatastore(), + root: new MemoryDatastore(), + keys: new MemoryDatastore(), + pins: new MemoryDatastore() + }, options) await repo.init({}) await repo.open() @@ -20,7 +28,13 @@ describe('IPFS Repo Tests on the Browser', () => { require('./options-test') require('./migrations-test')(createTempRepo) - const repo = new IPFSRepo('myrepo', loadCodec) + const repo = createRepo('myrepo', loadCodec, { + blocks: new MemoryBlockstore(), + datastore: new MemoryDatastore(), + root: new MemoryDatastore(), + keys: new MemoryDatastore(), + pins: new MemoryDatastore() + }) before(async () => { await repo.init({}) diff --git a/test/config-test.js b/test/config-test.js index f1aa482d..1ef1bbd9 100644 --- a/test/config-test.js +++ b/test/config-test.js @@ -4,8 +4,7 @@ const { expect } = require('aegir/utils/chai') /** - * - * @param {import('../src')} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('config', () => { diff --git a/test/datastore-test.js b/test/datastore-test.js index 27c9812f..3ef811cd 100644 --- a/test/datastore-test.js +++ b/test/datastore-test.js @@ -6,13 +6,9 @@ const { expect } = require('aegir/utils/chai') const range = require('just-range') const Key = require('interface-datastore').Key const uint8ArrayFromString = require('uint8arrays/from-string') -/** - * @typedef {import('../src/index')} Repo - */ /** - * - * @param {Repo} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('datastore', () => { diff --git a/test/fixtures/create-backend.js b/test/fixtures/create-backend.js new file mode 100644 index 00000000..7ceec596 --- /dev/null +++ b/test/fixtures/create-backend.js @@ -0,0 +1,19 @@ +'use strict' + +const { MemoryDatastore } = require('interface-datastore') +const BlockstoreDatastoreAdapter = require(('blockstore-datastore-adapter')) + +function createBackend (overrides = {}) { + return { + datastore: new MemoryDatastore(), + blocks: new BlockstoreDatastoreAdapter( + new MemoryDatastore() + ), + pins: new MemoryDatastore(), + keys: new MemoryDatastore(), + root: new MemoryDatastore(), + ...overrides + } +} + +module.exports = createBackend diff --git a/test/interop-test.js b/test/interop-test.js deleted file mode 100644 index 
43e5dd62..00000000 --- a/test/interop-test.js +++ /dev/null @@ -1,40 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('aegir/utils/chai') -const { CID } = require('multiformats/cid') -const Key = require('interface-datastore').Key -const uint8ArrayToString = require('uint8arrays/to-string') -const uint8ArrayFromString = require('uint8arrays/from-string') - -/** - * @param {import('../src/index')} repo - */ -module.exports = (repo) => { - describe('interop', () => { - it('reads welcome-to-ipfs', async () => { - const welcomeHash = CID.decode( - uint8ArrayFromString('1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec', 'base16') - ) - - const val = await repo.blocks.get(welcomeHash) - expect(uint8ArrayToString(val)).to.match(/Hello and Welcome to IPFS/) - }) - - it('reads a bunch of blocks', async () => { - const cids = [ - 'QmUxpzJGJYTK5AzH36jV9ucM2WdF5KhjANb4FAhqnREzuC', - 'QmQbb26h9dcU5iNPMNEzYZnZN9YLTXBtFwuHmmo6YU4Aig' - ].map((hash) => CID.parse(hash)) - - const values = await Promise.all(cids.map((cid) => repo.blocks.get(cid))) - expect(values.length).to.equal(2) - expect(values.map((value) => value.length)).to.eql([2659, 12783]) - }) - - it('reads DHT records from the datastore', async () => { - const val = await repo.datastore?.get(new Key('/AHE5I5B7TY')) - expect(uint8ArrayToString(val, 'base16')).to.eql('0a0601c9d4743f9e12097465737476616c75651a2212201d22e2a5e140e5cd20d88fc59cd560f4887c7d9acf938ddb24d7207eac40fd2f') - }) - }) -} diff --git a/test/is-initialized.js b/test/is-initialized.js index d20db9f8..f957626e 100644 --- a/test/is-initialized.js +++ b/test/is-initialized.js @@ -4,19 +4,23 @@ const { expect } = require('aegir/utils/chai') const tempDir = require('ipfs-utils/src/temp-dir') -const IPFSRepo = require('../src') +const { createRepo } = require('../src') const loadCodec = require('./fixtures/load-codec') +const createBackend = require('./fixtures/create-backend') +const MemoryLock = require('../src/locks/memory') /** - * @typedef {import('../src/index')} Repo + * @typedef {import('../src/types').IPFSRepo} IPFSRepo */ describe('isInitialized', () => { - /** @type {Repo} */ + /** @type {IPFSRepo} */ let repo beforeEach(() => { - repo = new IPFSRepo(tempDir(b => 'test-repo-for-' + b), loadCodec) + repo = createRepo(tempDir(b => 'test-repo-for-' + b), loadCodec, createBackend(), { + repoLock: MemoryLock + }) }) it('should be false before initialization', async () => { diff --git a/test/keystore-test.js b/test/keystore-test.js index f328b992..f45cdf2d 100644 --- a/test/keystore-test.js +++ b/test/keystore-test.js @@ -3,13 +3,9 @@ 'use strict' const { expect } = require('aegir/utils/chai') -/** - * @typedef {import('../src/index')} Repo - */ /** - * - * @param {Repo} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('keystore', () => { diff --git a/test/lock-test.js b/test/lock-test.js index be9a1193..2259d1fb 100644 --- a/test/lock-test.js +++ b/test/lock-test.js @@ -2,13 +2,13 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const IPFSRepo = require('../') -const lockMemory = require('../src/lock-memory') +const { createRepo } = require('../') +const lockMemory = require('../src/locks/memory') const { LockExistsError } = require('./../src/errors') const loadCodec = require('./fixtures/load-codec') /** - * @param {import('../src/index')} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('Repo lock tests', () => { @@ -19,7 
+19,13 @@ module.exports = (repo) => {
    })
 
    it('should prevent multiple repos from using the same path', async () => {
-      const repoClone = new IPFSRepo(repo.path, loadCodec, repo.options)
+      const repoClone = createRepo(repo.path, loadCodec, {
+        blocks: repo.pins.blockstore,
+        datastore: repo.datastore,
+        root: repo.root,
+        keys: repo.keys,
+        pins: repo.pins.pinstore
+      }, repo.options)
      try {
        await repoClone.init({})
        await repoClone.open()
diff --git a/test/migrations-test.js b/test/migrations-test.js
index 9f000314..e3ba86c8 100644
--- a/test/migrations-test.js
+++ b/test/migrations-test.js
@@ -8,18 +8,19 @@ const sinon = require('sinon')
 const migrator = require('ipfs-repo-migrations')
 const constants = require('../src/constants')
 const errors = require('../src/errors')
-const IPFSRepo = require('../src')
-const loadCodec = require('./fixtures/load-codec')
+const { createRepo } = require('../src')
 
/**
- * @typedef {import('../src/index')} Repo
+ * @typedef {import('../src/types').IPFSRepo} IPFSRepo
+ * @typedef {import('../src/types').Options} Options
 */
 
/**
- * @param {(options? : any)=> Promise} createTempRepo
+ * @param {(options?: Partial<Options>) => Promise<IPFSRepo>} createTempRepo
 */
module.exports = (createTempRepo) => {
  describe('Migrations tests', () => {
-    /** @type {Repo} */
+    /** @type {IPFSRepo} */
    let repo
    /** @type {sinon.SinonStub} */
    let migrateStub
@@ -74,9 +75,16 @@
      await repo.version.set(7)
      await repo.close()
 
+      // @ts-expect-error options is a private field
      const newOpts = Object.assign({}, repo.options)
      newOpts.autoMigrate = option
-      const newRepo = new IPFSRepo(repo.path, loadCodec, newOpts)
+      const newRepo = createRepo(repo.path, repo.pins.loadCodec, {
+        blocks: repo.pins.blockstore,
+        datastore: repo.datastore,
+        root: repo.root,
+        keys: repo.keys,
+        pins: repo.pins.pinstore
+      }, newOpts)
 
      expect(migrateStub.called).to.be.false()
 
@@ -116,12 +124,14 @@
      expect(migrateStub.called).to.be.false()
 
+      // @ts-expect-error options is a private field
      repo.options.onMigrationProgress = sinon.stub()
 
      await repo.open()
 
      expect(migrateStub.called).to.be.true()
-      expect(migrateStub.getCall(0).args[3]).to.have.property('onProgress', repo.options.onMigrationProgress)
+      // @ts-expect-error options is a private field
+      expect(migrateStub.getCall(0).args[4]).to.have.property('onProgress', repo.options.onMigrationProgress)
    })
 
    it('should not migrate when versions matches', async () => {
@@ -165,12 +175,14 @@
      expect(revertStub.called).to.be.false()
 
+      // @ts-expect-error options is a private field
      repo.options.onMigrationProgress = sinon.stub()
 
      await repo.open()
 
      expect(revertStub.called).to.be.true()
-      expect(revertStub.getCall(0).args[3]).to.have.property('onProgress', repo.options.onMigrationProgress)
+      // @ts-expect-error options is a private field
+      expect(revertStub.getCall(0).args[4]).to.have.property('onProgress', repo.options.onMigrationProgress)
    })
  })
}
diff --git a/test/node.js b/test/node.js
index 6d4d4ec7..6098620c 100644
--- a/test/node.js
+++ b/test/node.js
@@ -1,34 +1,26 @@
 /* eslint-env mocha */
 'use strict'
 
-const ncp = require('ncp').ncp
-const rimraf = require('rimraf')
-const fs = require('fs')
-const path = require('path')
-const promisify = require('util').promisify
-const os = require('os')
-const { LockExistsError } = require('../src/errors')
 const loadCodec = require('./fixtures/load-codec')
-
-const asyncRimraf = promisify(rimraf)
-const asyncNcp = promisify(ncp)
-const
fsstat = promisify(fs.stat)
+const MemoryLock = require('../src/locks/memory')
+const createBackend = require('./fixtures/create-backend')
 
/**
 * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec
 * @typedef {import('../src/types').Options} Options
 */
 
-const IPFSRepo = require('../src')
+const { createRepo } = require('../src')
 
/**
- * @param {Options} options
+ * @param {Partial<Options>} [options]
 */
-async function createTempRepo (options) {
-  const date = Date.now().toString()
-  const repoPath = path.join(os.tmpdir(), 'test-repo-for-' + date)
-  await asyncNcp(path.join(__dirname, 'test-repo'), repoPath)
-  const repo = new IPFSRepo(repoPath, loadCodec, options)
+async function createTempRepo (options = {}) {
+  const repo = createRepo(`/foo-${Math.random()}`, loadCodec, createBackend(), {
+    ...options,
+    repoLock: MemoryLock
+  })
+
+  await repo.init({})
  await repo.open()
  return repo
}
@@ -37,83 +29,34 @@ describe('IPFS Repo Tests onNode.js', () => {
  require('./options-test')
  require('./migrations-test')(createTempRepo)
 
-  const customLock = {
-    lockName: 'test.lock',
-    /**
-     * @param {string} dir
-     */
-    lock: async (dir) => {
-      const isLocked = await customLock.locked(dir)
-      if (isLocked) {
-        throw new LockExistsError('already locked')
-      }
-      const lockPath = path.join(dir, customLock.lockName)
-      fs.writeFileSync(lockPath, '')
-      return {
-        close: () => asyncRimraf(lockPath)
-      }
-    },
-    /**
-     * @param {string} dir
-     */
-    locked: async (dir) => {
-      try {
-        await fsstat(path.join(dir, customLock.lockName))
-        return true
-      } catch (err) {
-        return false
-      }
-    }
-  }
-
  /**
-   * @type {Array<{name: string, opts?: Options, init: boolean}>}
+   * @type {Array<{name: string, opts?: Options}>}
   */
  const repos = [
    {
      name: 'default inited',
-      opts: undefined,
-      init: true
+      opts: undefined
    },
    {
      name: 'memory',
      opts: {
-        lock: 'memory'
-      },
-      init: true
-    },
-    {
-      name: 'custom locker',
-      opts: {
-        lock: customLock
-      },
-      init: true
-    },
-    {
-      name: 'default existing',
-      opts: undefined,
-      init: false
+        repoLock: MemoryLock
+      }
    }
  ]
 
  repos.forEach((r) => describe(r.name, () => {
-    const testRepoPath = path.join(__dirname, 'test-repo')
-    const date = Date.now().toString()
-    const repoPath = path.join(os.tmpdir(), 'test-repo-for-' + date)
-
-    const repo = new IPFSRepo(repoPath, loadCodec, r.opts)
+    const repo = createRepo(`repo-${Math.random()}`, loadCodec, createBackend(), {
+      repoLock: MemoryLock,
+      ...(r.opts || {})
+    })
 
    before(async () => {
-      if (r.init) {
-        await repo.init({})
-      } else {
-        await asyncNcp(testRepoPath, repoPath)
-      }
+      await repo.init({})
      await repo.open()
    })
 
    after(async () => {
      await repo.close()
-      await asyncRimraf(repoPath)
    })
 
    require('./repo-test')(repo)
@@ -124,9 +67,6 @@
    require('./lock-test')(repo)
    require('./config-test')(repo)
    require('./api-addr-test')()
-    if (!r.init) {
-      require('./interop-test')(repo)
-    }
    require('./pins-test')(repo)
    require('./is-initialized')
  }))
diff --git a/test/options-test.js b/test/options-test.js
index 84c00066..d5949b66 100644
--- a/test/options-test.js
+++ b/test/options-test.js
@@ -2,6 +2,7 @@
 'use strict'
 
 const { expect } = require('aegir/utils/chai')
+const sinon = require('sinon')
 const tempDir = require('ipfs-utils/src/temp-dir')
 const { isNode } = require('ipfs-utils/src/env')
 const rimraf = require('rimraf')
@@ -9,8 +10,9 @@ if (!rimraf.sync) {
  // browser
  rimraf.sync = noop
}
-const Repo = require('../')
+const { createRepo } = require('../')
 const loadCodec =
require('./fixtures/load-codec') +const createBackend = require('./fixtures/create-backend') describe('custom options tests', () => { const repoPath = tempDir() @@ -21,39 +23,49 @@ describe('custom options tests', () => { it('missing repoPath', () => { expect( // @ts-expect-error - () => new Repo() + () => createRepo() ).to.throw('missing repo path') }) it('default options', () => { - const repo = new Repo(repoPath, loadCodec) + const repo = createRepo(repoPath, loadCodec, createBackend()) + // @ts-expect-error options is a private field expect(repo.options).to.deep.equal(expectedRepoOptions()) }) - it('allows for a custom lock', () => { + it('allows for a custom lock', async () => { + const release = { + close () { return Promise.resolve() } + } + const lock = { /** - * @param {any} path + * @param {string} path */ - lock: async (path) => { - return Promise.resolve({ - close () { return Promise.resolve() } - }) + lock: (path) => { + return Promise.resolve(release) }, /** - * @param {any} path + * @param {string} path */ - locked: async (path) => { + locked: (path) => { return Promise.resolve(true) } } - const repo = new Repo(repoPath, loadCodec, { - lock + const lockSpy = sinon.spy(lock, 'lock') + const releaseSpy = sinon.spy(release, 'close') + + const repo = createRepo(repoPath, loadCodec, createBackend(), { + repoLock: lock }) - // @ts-ignore we should not be using private methods - expect(repo._getLocker()).to.deep.equal(lock) + await repo.init({}) + await repo.open() + await repo.close() + + expect(lockSpy.callCount).to.equal(1) + expect(releaseSpy.callCount).to.equal(1) }) it('ensures a custom lock has a .close method', async () => { @@ -74,9 +86,9 @@ describe('custom options tests', () => { } } - const repo = new Repo(repoPath, loadCodec, { - // @ts-expect-error - lock + const repo = createRepo(repoPath, loadCodec, createBackend(), { + // @ts-expect-error lock closer types are wrong + repoLock: lock }) let error try { @@ -95,5 +107,5 @@ function expectedRepoOptions () { if (isNode) { return require('../src/default-options') } - return require('../src/default-options-browser') + return require('../src/default-options.browser') } diff --git a/test/pins-test.js b/test/pins-test.js index 7320573a..bb9a207f 100644 --- a/test/pins-test.js +++ b/test/pins-test.js @@ -6,13 +6,9 @@ const { expect } = require('aegir/utils/chai') const range = require('just-range') const Key = require('interface-datastore').Key const uint8ArrayFromString = require('uint8arrays/from-string') -/** - * @typedef {import('../src/index')} Repo - */ /** - * - * @param {Repo} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('pins', () => { diff --git a/test/repo-test.js b/test/repo-test.js index f50eda43..e3aa979b 100644 --- a/test/repo-test.js +++ b/test/repo-test.js @@ -2,19 +2,19 @@ 'use strict' const { expect } = require('aegir/utils/chai') +const sinon = require('sinon') const tempDir = require('ipfs-utils/src/temp-dir') -const IPFSRepo = require('../') +const { createRepo } = require('../') const Errors = require('../src/errors') const bytes = require('bytes') -const { Adapter } = require('interface-datastore') +const { Adapter, MemoryDatastore } = require('interface-datastore') +const { MemoryBlockstore } = require('interface-blockstore') const loadCodec = require('./fixtures/load-codec') +const MemoryLock = require('../src/locks/memory') +const createBackend = require('./fixtures/create-backend') /** - * @typedef {import('interface-datastore').Key} Key - */ - 
-/** - * @param {import('../src/index')} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('IPFS Repo Tests', () => { @@ -128,65 +128,25 @@ module.exports = (repo) => { }) it('should close all the datastores', async () => { - let count = 0 - class FakeDatastore extends Adapter { - constructor () { - super() - /** @type {Record} */ - this.data = {} - } - - async open () {} - - /** - * @param {Key} key - * @param {Uint8Array} val - */ - async put (key, val) { - this.data[key.toString()] = val - } - - /** - * @param {Key} key - */ - async get (key) { - const exists = await this.has(key) - if (!exists) throw new Errors.NotFoundError() - return this.data[key.toString()] - } - - /** - * @param {Key} key - */ - async has (key) { - return this.data[key.toString()] !== undefined - } - - /** - * @param {Key} key - */ - async delete (key) { - delete this.data[key.toString()] - } - - async close () { - count++ - } - } - const repo = new IPFSRepo(tempDir(), loadCodec, { - lock: 'memory', - storageBackends: { - root: FakeDatastore, - blocks: FakeDatastore, - keys: FakeDatastore, - datastore: FakeDatastore, - pins: FakeDatastore - } + const memoryDs = new MemoryDatastore() + const memoryBs = new MemoryBlockstore() + const spyDs = sinon.spy(memoryDs, 'close') + const spyBs = sinon.spy(memoryBs, 'close') + + const repo = createRepo(tempDir(), loadCodec, { + root: memoryDs, + blocks: memoryBs, + keys: memoryDs, + datastore: memoryDs, + pins: memoryDs + }, { + repoLock: MemoryLock }) await repo.init({}) await repo.open() await repo.close() - expect(count).to.be.eq(5) + + expect(spyDs.callCount + spyBs.callCount).to.equal(5) }) it('open twice throws error', async () => { @@ -201,22 +161,22 @@ module.exports = (repo) => { }) it('should throw non-already-open errors when opening the root', async () => { - const otherRepo = new IPFSRepo(tempDir(), loadCodec) + const otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) const err = new Error('wat') otherRepo.root.open = () => { throw err } - try { - await otherRepo.init({}) - } catch (err2) { - expect(err2).to.deep.equal(err) - } + return expect(otherRepo.init({})).to.eventually.be.rejectedWith(err) }) it('should ignore non-already-open errors when opening the root', async () => { - const otherRepo = new IPFSRepo(tempDir(), loadCodec) + const otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) const err = new Error('Already open') let threwError = false @@ -252,33 +212,24 @@ module.exports = (repo) => { }) it('should remove the lockfile when opening the repo fails', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec, { - storageBackends: { - datastore: ExplodingDatastore, - blocks: ExplodingDatastore, - pins: ExplodingDatastore, - keys: ExplodingDatastore - // root: ExplodingDatastore - } + otherRepo = createRepo(tempDir(), loadCodec, createBackend({ + datastore: new ExplodingDatastore() + }), { + repoLock: MemoryLock }) - try { - await otherRepo.init({}) - await otherRepo.open() - } catch (err) { - expect(otherRepo.lockfile).to.be.null() - } + await otherRepo.init({}) + await expect(otherRepo.open()).to.eventually.be.rejectedWith('wat') + + // @ts-expect-error - _lockfile not part of the interface + expect(otherRepo._lockfile).to.be.null() }) it('should re-throw the original error even when removing the lockfile fails', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec, { - storageBackends: { - datastore: 
ExplodingDatastore, - blocks: ExplodingDatastore, - pins: ExplodingDatastore, - keys: ExplodingDatastore, - root: ExplodingDatastore - } + otherRepo = createRepo(tempDir(), loadCodec, createBackend({ + datastore: new ExplodingDatastore() + }), { + repoLock: MemoryLock }) // @ts-ignore we should not be using private stuff @@ -295,14 +246,8 @@ module.exports = (repo) => { }) it('should throw when repos are not initialised', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec, { - storageBackends: { - datastore: ExplodingDatastore, - blocks: ExplodingDatastore, - pins: ExplodingDatastore, - keys: ExplodingDatastore - // root: ExplodingDatastore - } + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock }) try { @@ -313,7 +258,9 @@ module.exports = (repo) => { }) it('should throw when config is not set', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec) + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) otherRepo.config.exists = async () => false otherRepo.spec.exists = async () => true otherRepo.version.check = async () => false @@ -328,7 +275,9 @@ module.exports = (repo) => { it('should return the max storage stat when set', async () => { const maxStorage = '1GB' - otherRepo = new IPFSRepo(tempDir(), loadCodec) + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) await otherRepo.init({}) await otherRepo.open() await otherRepo.config.set('Datastore.StorageMax', maxStorage) @@ -340,7 +289,9 @@ module.exports = (repo) => { }) it('should throw unexpected errors when closing', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec) + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) await otherRepo.init({}) await otherRepo.open() @@ -359,7 +310,9 @@ module.exports = (repo) => { }) it('should swallow expected errors when closing', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec) + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) await otherRepo.init({}) await otherRepo.open() @@ -373,7 +326,9 @@ module.exports = (repo) => { }) it('should throw unexpected errors when checking if the repo has been initialised', async () => { - otherRepo = new IPFSRepo(tempDir(), loadCodec) + otherRepo = createRepo(tempDir(), loadCodec, createBackend(), { + repoLock: MemoryLock + }) otherRepo.config.exists = async () => { return true diff --git a/test/stat-test.js b/test/stat-test.js index dac0e24a..c47e8421 100644 --- a/test/stat-test.js +++ b/test/stat-test.js @@ -4,8 +4,9 @@ const { expect } = require('aegir/utils/chai') const { CID } = require('multiformats/cid') const uint8ArrayFromString = require('uint8arrays/from-string') + /** - * @param {import('../src/index')} repo + * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('stat', () => { diff --git a/test/test-repo/api b/test/test-repo/api deleted file mode 100644 index ec31f33a..00000000 --- a/test/test-repo/api +++ /dev/null @@ -1 +0,0 @@ -/ip4/127.0.0.1/tcp/5001 \ No newline at end of file diff --git a/test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data b/test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data deleted file mode 100644 index 9290f631..00000000 --- a/test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data +++ /dev/null @@ -1,56 +0,0 @@ - -� -� - IPFS -- Inter-Planetary 
File system - -IPFS is a global, versioned, peer-to-peer filesystem. It combines good ideas -from Git, BitTorrent, Kademlia, SFS, and the Web. It is like a single bit- -torrent swarm, exchanging git objects. IPFS provides an interface as simple -as the HTTP web, but with permanence built in. You can also mount the world -at /ipfs. - -IPFS is a protocol: -- defines a content-addressed file system -- coordinates content delivery -- combines Kademlia + BitTorrent + Git - -IPFS is a filesystem: -- has directories and files -- mountable filesystem (via FUSE) - -IPFS is a web: -- can be used to view documents like the web -- files accessible via HTTP at `http://ipfs.io/` -- browsers or extensions can learn to use `ipfs://` directly -- hash-addressed content guarantees authenticity - -IPFS is modular: -- connection layer over any network protocol -- routing layer -- uses a routing layer DHT (kademlia/coral) -- uses a path-based naming service -- uses bittorrent-inspired block exchange - -IPFS uses crypto: -- cryptographic-hash content addressing -- block-level deduplication -- file integrity + versioning -- filesystem-level encryption + signing support - -IPFS is p2p: -- worldwide peer-to-peer file transfers -- completely decentralized architecture -- **no** central point of failure - -IPFS is a cdn: -- add a file to the filesystem locally, and it's now available to the world -- caching-friendly (content-hash naming) -- bittorrent-based bandwidth distribution - -IPFS has a name service: -- IPNS, an SFS inspired name system -- global namespace based on PKI -- serves to build trust chains -- compatible with other NSes -- can map DNS, .onion, .bit, etc to IPNS -� diff --git a/test/test-repo/blocks/EP/CIQOEA4URV75WXHSIBY7SYTVOFIEO5657YM3T6DTHFRQYSUZAGRLEPY.data b/test/test-repo/blocks/EP/CIQOEA4URV75WXHSIBY7SYTVOFIEO5657YM3T6DTHFRQYSUZAGRLEPY.data deleted file mode 100644 index 02f8aad3477a9a363c88b95501fa0e2316f56b5d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10933 zcmWgA;8GG&c)a0=(~LhdBC~#fIWaSTr;7SI$0sSXC;Kd!I#aqLXh*0Jg9L-nsLIh$ z8ciXixns0s7%de?YlP9-aI`iYtqn(O!_nGsv^E^A4M%Ik(b{mdHXN-DM{C2;+HkZs z9IXvUYs1mnaI`iYtqn(O!_nGsv^E^A4M%Ik(b{mdHXN-DM{C2;+HkZs9IXvUYs1mn zaI`iYG_|2(*URsaq^n08;K6y{ppKiy^cjTsS?c*%2MXdjfoYG>+7O$B& z?~72w>77RVf{y0SJUT5q^?{&}<%b!&-y>8eWi5BUa?WI1NdLlFGhIC2hpQ()d2n6o zsO_i7NtQoU>Jci_pS;~P({umyr@@UH-;}=b2mIK$d}T Implementation of the IPFS repo spec (https://github.com/ipfs/specs/tree/master/repo) in JavaScript - -This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs/tree/master/repo) in JavaScript. 
- -## Table of Contents - -- [Background](#background) -- [Install](#install) - - [npm](#npm) - - [Use in Node.js](#use-in-nodejs) - - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) - - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag) -- [Usage](#usage) -- [API](#api) -- [Contribute](#contribute) -- [License](#license) - -## Background - -Here is the architectural reasoning for this repo: - -```bash - ┌────────────────────────────────────────┐ - │ IPFSRepo │ - └────────────────────────────────────────┘ - ┌─────────────────┐ - │ / │ - ├─────────────────┤ - │ Datastore │ - └─────────────────┘ - │ - ┌───────────┴───────────┐ - │ │ - ┌─────────────────┐ ┌─────────────────┐ - │ /blocks │ │ /datastore │ - ├─────────────────┤ ├─────────────────┤ - │ Datastore │ │ LevelDatastore │ - └─────────────────┘ └─────────────────┘ - -┌────────────────────────────────────────┐ ┌────────────────────────────────────────┐ -│ IPFSRepo - Default Node.js │ │ IPFSRepo - Default Browser │ -└────────────────────────────────────────┘ └────────────────────────────────────────┘ - ┌─────────────────┐ ┌─────────────────┐ - │ / │ │ / │ - ├─────────────────┤ ├─────────────────┤ - │ FsDatastore │ │LevelJSDatastore │ - └─────────────────┘ └─────────────────┘ - │ │ - ┌───────────┴───────────┐ ┌───────────┴───────────┐ - │ │ │ │ -┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ -│ /blocks │ │ /datastore │ │ /blocks │ │ /datastore │ -├─────────────────┤ ├─────────────────┤ ├─────────────────┤ ├─────────────────┤ -│ FlatfsDatastore │ │LevelDBDatastore │ │LevelJSDatastore │ │LevelJSDatastore │ -└─────────────────┘ └─────────────────┘ └─────────────────┘ └─────────────────┘ -``` - -This provides a well defined interface for creating and interacting with an IPFS repo. - -## Install - -### npm - -```sh -> npm install ipfs-repo -``` - -### Use in Node.js - -```js -var IPFSRepo = require('ipfs-repo') -``` - -### Use in a browser with browserify, webpack or any other bundler - -```js -var IPFSRepo = require('ipfs-repo') -``` - -### Use in a browser Using a script tag - -Loading this module through a script tag will make the `IpfsRepo` obj available in the global namespace. - -```html - - - -``` - -## Usage - -Example: - -```js -const Repo = require('ipfs-repo') -const repo = new Repo('/tmp/ipfs-repo') - -repo.init({ cool: 'config' }, (err) => { - if (err) { - throw err - } - - repo.open((err) => { - if (err) { - throw err - } - - console.log('repo is ready') - }) -}) -``` - -This now has created the following structure, either on disk or as an in memory representation: - -``` -├── blocks -│   ├── SHARDING -│ └── _README -├── config -├── datastore -└── version -``` - -## API - -### Setup - -#### `new Repo(path[, options])` - -Creates an IPFS Repo. - -Arguments: - -* `path` (string, mandatory): the path for this repo -* `options` (object, optional): may contain the following values - * `lock` (string, defaults to `"fs"` in Node.js, `"memory"` in the browser): what type of lock to use. Lock has to be acquired when opening. 
- * `storageBackends` (object, optional): may contain the following values, which should each be a class implementing the [datastore interface](https://github.com/ipfs/interface-datastore#readme): - * `root` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at the root (`repo.set()`, `repo.get()`) - * `blocks` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at `repo.blocks`. - * `datastore` (defaults to [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme)). Defines the back-end type used as the key-valye store used for gets and puts of values at `repo.datastore`. - -```js -const repo = new Repo('path/to/repo') -``` - -#### `repo.init (callback)` - -Creates the necessary folder structure inside the repo. - -#### `repo.open (callback)` - -Locks the repo. - -#### `repo.close (callback)` - -Unlocks the repo. - -#### `repo.exists (callback)` - -Tells whether this repo exists or not. Calls back with `(err, bool)`. - -### Repos - -Root repo: - -#### `repo.put (key, value:Buffer, callback)` - -Put a value at the root of the repo. - -* `key` can be a buffer, a string or a [Key](https://github.com/ipfs/interface-datastore#keys). - -#### `repo.get (key, callback)` - -Get a value at the root of the repo. - -* `key` can be a buffer, a string or a [Key](https://github.com/ipfs/interface-datastore#keys). -* `callback` is a callback function `function (err, result:Buffer)` - -[Blocks](https://github.com/ipfs/js-ipfs-block#readme): - -#### `repo.blocks.put (block:Block, callback)` - -* `block` should be of type [Block](https://github.com/ipfs/js-ipfs-block#readme). - -#### `repo.blocks.putMany (blocks, callback)` - -Put many blocks. - -* `block` should be an array of type [Block](https://github.com/ipfs/js-ipfs-block#readme). - -#### `repo.blocks.get (cid, callback)` - -Get block. - -* `cid` is the content id of [type CID](https://github.com/ipld/js-cid#readme). -* `callback` is a callback function `function (err, result:Buffer)` - -Datastore: - -#### `repo.datastore` - -This is contains a full implementation of [the `interface-datastore` API](https://github.com/ipfs/interface-datastore#api). - - -### Utils - -#### `repo.config` - -Instead of using `repo.set('config')` this exposes an API that allows you to set and get a decoded config object, as well as, in a safe manner, change any of the config values individually. - -##### `repo.config.set(key:string, value, callback)` - -Set a config value. `value` can be any object that is serializable to JSON. - -* `key` is a string specifying the object path. Example: - -```js -repo.config.set('a.b.c', 'c value', (err) => { - if (err) { throw err } - repo.config.get((err, config) => { - if (err) { throw err } - assert.equal(config.a.b.c, 'c value') - }) -}) -``` - -##### `repo.config.get(value, callback)` - -Set the whole config value. `value` can be any object that is serializable to JSON. 
- -##### `repo.config.get(key:string, callback)` - -Get a config value. `callback` is a function with the signature: `function (err, value)`, wehre the ` -value` is of the same type that was set before. - -* `key` is a string specifying the object path. Example: - -```js -repo.config.get('a.b.c', (err, value) => { - if (err) { throw err } - console.log('config.a.b.c = ', value) -}) -``` - -##### `repo.config.get(callback)` - -Get the entire config value. `callback` is a function with the signature: `function (err, configValue:Object)`. - -#### `repo.config.exists(callback)` - -Whether the config sub-repo exists. Calls back with `(err, bool)`. - -#### `repo.version` - -##### `repo.version.get (callback)` - -Gets the repo version. - -##### `repo.version.set (version:number, callback)` - -Sets the repo version - -#### `repo.apiAddr` - -#### `repo.apiAddr.get (callback)` - -Gets the API address. - -#### `repo.apiAddr.set (value, callback)` - -Sets the API address. - -* `value` should be a [Multiaddr](https://github.com/multiformats/js-multiaddr) or a String representing a valid one. - -## Notes - -- [Explanation of how repo is structured](https://github.com/ipfs/js-ipfs-repo/pull/111#issuecomment-279948247) - -## Contribute - -There are some ways you can make this module better: - -- Consult our [open issues](https://github.com/ipfs/js-ipfs-repo/issues) and take on one of them -- Help our tests reach 100% coverage! - -This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). - -[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md) - -## License - -[MIT](LICENSE) -�c \ No newline at end of file diff --git a/test/test-repo/blocks/I7/CIQGE2VHIPKM4NFWKSH2DGUZIRE66VZHMPSOBVY2YU67EWMSHH4CI7Y.data b/test/test-repo/blocks/I7/CIQGE2VHIPKM4NFWKSH2DGUZIRE66VZHMPSOBVY2YU67EWMSHH4CI7Y.data deleted file mode 100644 index 39b8f166..00000000 --- a/test/test-repo/blocks/I7/CIQGE2VHIPKM4NFWKSH2DGUZIRE66VZHMPSOBVY2YU67EWMSHH4CI7Y.data +++ /dev/null @@ -1,93 +0,0 @@ - -��{ - "name": "ipfs-repo", - "version": "0.14.0", - "description": "IPFS Repo implementation", - "main": "src/index.js", - "browser": { - "rimraf": false, - "datastore-fs": "datastore-level", - "leveldown": "level-js", - "./src/lock.js": "./src/lock-memory.js", - "./src/default-options.js": "./src/default-options-browser.js" - }, - "scripts": { - "test": "aegir-test", - "test:node": "aegir-test node", - "test:browser": "aegir-test browser", - "build": "aegir-build", - "lint": "aegir-lint", - "release": "aegir-release --docs", - "release-minor": "aegir-release --type minor --docs", - "release-major": "aegir-release --type major --docs", - "coverage": "aegir-coverage", - "coverage-publish": "aegir-coverage publish", - "docs": "aegir-docs" - }, - "repository": { - "type": "git", - "url": "https://github.com/ipfs/js-ipfs-repo.git" - }, - "keywords": [ - "IPFS", - "libp2p", - "datastore" - ], - "pre-commit": [ - "lint", - "test" - ], - "homepage": "https://github.com/ipfs/js-ipfs-repo", - "engines": { - "node": ">=4.0.0", - "npm": ">=3.0.0" - }, - "devDependencies": { - "aegir": "^11.0.2", - "chai": "^4.0.2", - "dirty-chai": "^2.0.0", - "lodash": "^4.17.4", - "memdown": "^1.2.4", - "multihashes": "~0.4.5", - "ncp": "^2.0.0", - "pre-commit": "^1.2.2", - "rimraf": 
"^2.6.1" - }, - "dependencies": { - "async": "^2.5.0", - "base32.js": "^0.1.0", - "cids": "^0.5.0", - "datastore-core": "^0.2.0", - "datastore-fs": "^0.2.0", - "datastore-level": "^0.4.2", - "debug": "^2.6.8", - "interface-datastore": "^0.2.2", - "ipfs-block": "~0.6.0", - "level-js": "timkuijsten/level.js#idbunwrapper", - "leveldown": "^1.7.2", - "lock-me": "^1.0.2", - "lodash.get": "^4.4.2", - "lodash.has": "^4.5.2", - "lodash.set": "^4.3.2", - "multiaddr": "^2.3.0", - "safe-buffer": "^5.1.1" - }, - "license": "MIT", - "contributors": [ - "Brian Hoffman ", - "David Dias ", - "Dmitriy Ryajov ", - "Francisco Baio Dias ", - "Friedel Ziegelmayer ", - "Greenkeeper ", - "Justin Chase ", - "Lars-Magnus Skog ", - "Pau Ramon Revilla ", - "Richard Littauer ", - "Stephen Whitmore ", - "greenkeeper[bot] ", - "nginnever ", - "npmcdn-to-unpkg-bot " - ] -} -� \ No newline at end of file diff --git a/test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data b/test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data deleted file mode 100644 index 62d1c297..00000000 --- a/test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data +++ /dev/null @@ -1,8 +0,0 @@ - -��Come hang out in our IRC chat room if you have any questions. - -Contact the ipfs dev team: -- Bugs: https://github.com/ipfs/go-ipfs/issues -- Help: irc.freenode.org/#ipfs -- Email: dev@ipfs.io -� \ No newline at end of file diff --git a/test/test-repo/blocks/JA/CIQHS34HCZMO72ZWZBHY5SBVURQIA4LDBBVVTHZMZE5KPUQSVNVHJAQ.data b/test/test-repo/blocks/JA/CIQHS34HCZMO72ZWZBHY5SBVURQIA4LDBBVVTHZMZE5KPUQSVNVHJAQ.data deleted file mode 100644 index 2469a1a1..00000000 --- a/test/test-repo/blocks/JA/CIQHS34HCZMO72ZWZBHY5SBVURQIA4LDBBVVTHZMZE5KPUQSVNVHJAQ.data +++ /dev/null @@ -1,3 +0,0 @@ -5 -" bj�C��4�T����DI�W'c����=�Y�9�$ package.json� - \ No newline at end of file diff --git a/test/test-repo/blocks/JN/CIQPHMHGQLLZXC32FQQW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data b/test/test-repo/blocks/JN/CIQPHMHGQLLZXC32FQQW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data deleted file mode 100644 index 27088364..00000000 --- a/test/test-repo/blocks/JN/CIQPHMHGQLLZXC32FQQW2YVM4KGFORVFJAQYY55VK3WJGLZ2MS4RJNQ.data +++ /dev/null @@ -1,3 +0,0 @@ - - -ipfs \ No newline at end of file diff --git a/test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data b/test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data deleted file mode 100644 index 71be805f..00000000 --- a/test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data +++ /dev/null @@ -1,9 +0,0 @@ - -��Some helpful resources for finding your way around ipfs: - -- quick-start: a quick show of various ipfs features. -- ipfs commands: a list of all commands -- ipfs --help: every command describes itself -- https://github.com/ipfs/go-ipfs -- the src repository -- #ipfs on irc.freenode.org -- the community irc channel -� \ No newline at end of file diff --git a/test/test-repo/blocks/M4/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data b/test/test-repo/blocks/M4/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data deleted file mode 100644 index f2bf4f8b..00000000 --- a/test/test-repo/blocks/M4/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data +++ /dev/null @@ -1,115 +0,0 @@ - -� � # 0.1 - Quick Start - -This is a set of short examples with minimal explanation. It is meant as -a "quick start". 
Soon, we'll write a longer tour :-)
-
-
-Add a file to ipfs:
-
-  echo "hello world" >hello
-  ipfs add hello
-
-
-View it:
-
-  ipfs cat <hash>
-
-
-Try a directory:
-
-  mkdir foo
-  mkdir foo/bar
-  echo "baz" > foo/baz
-  echo "baz" > foo/bar/baz
-  ipfs add -r foo
-
-
-View things:
-
-  ipfs ls <hash>
-  ipfs ls <hash>/bar
-  ipfs cat <hash>/baz
-  ipfs cat <hash>/bar/baz
-  ipfs cat <hash>/bar
-  ipfs ls <hash>/baz
-
-
-References:
-
-  ipfs refs <hash>
-  ipfs refs -r <hash>
-  ipfs refs --help
-
-
-Get:
-
-  ipfs get <hash> -o foo2
-  diff foo foo2
-
-
-Objects:
-
-  ipfs object get <hash>
-  ipfs object get <hash>/foo2
-  ipfs object --help
-
-
-Pin + GC:
-
-  ipfs pin add <hash>
-  ipfs repo gc
-  ipfs ls <hash>
-  ipfs pin rm <hash>
-  ipfs repo gc
-
-
-Daemon:
-
-  ipfs daemon  (in another terminal)
-  ipfs id
-
-
-Network:
-
-  (must be online)
-  ipfs swarm peers
-  ipfs id
-  ipfs cat <hash>
-
-
-Mount:
-
-  (warning: fuse is finicky!)
-  ipfs mount
-  cd /ipfs/<hash>
-  ls
-
-
-Tool:
-
-  ipfs version
-  ipfs update
-  ipfs commands
-  ipfs config --help
-  open http://localhost:5001/webui
-
-
-Browse:
-
-  webui:
-
-    http://localhost:5001/webui
-
-  video:
-
-    http://localhost:8080/ipfs/QmVc6zuAneKJzicnJpfrqCH9gSy6bz54JhcypfJYhGUFQu/play#/ipfs/QmTKZgRNwDNZwHtJSjCp6r5FYefzpULfy37JvMt9DwvXse
-
-  images:
-
-    http://localhost:8080/ipfs/QmZpc3HvfjEXvLWGQPWbHk3AjD5j8NEN4gmFN8Jmrd5g83/cs
-
-  markdown renderer app:
-
-    http://localhost:8080/ipfs/QmX7M9CiYXjVeFnkfVGf3y5ixTZ2ACeSGyL1vBJY1HvQPp/mdown
-�
\ No newline at end of file
diff --git a/test/test-repo/blocks/N2/CIQDWKPBHXLJ3XVELRJZA2SYY7OGCSX6FRSIZS2VQQPVKOA2Z4VXN2I.data b/test/test-repo/blocks/N2/CIQDWKPBHXLJ3XVELRJZA2SYY7OGCSX6FRSIZS2VQQPVKOA2Z4VXN2I.data
deleted file mode 100644
index 7e1e7f1eeb93f1ec51a94ce61cb254cc8489a4b9..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 10765
zcmeIzF%AJy6a~;vHTfF(nL>g@L^89DnOY^W1HD#h4w@rUFyUbJQg=Eu}B*b@U2CYa08q$!4
zG^8O7X-GpF(vXHUq#+GyNJARZkcKp*Aq{CrLmJYMhBTxh4QWV28q)9&G>n~?wYux}
HF#P5V?hx|V

diff --git a/test/test-repo/blocks/OO/CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data b/test/test-repo/blocks/OO/CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data
deleted file mode 100644
index 77034827..00000000
--- a/test/test-repo/blocks/OO/CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ.data
+++ /dev/null
@@ -1,27 +0,0 @@
-
-� � IPFS Alpha Security Notes
-
-We try hard to ensure our system is safe and robust, but all software
-has bugs, especially new software. This distribution is meant to be an
-alpha preview, don't use it for anything mission critical.
-
-Please note the following:
-
-- This is alpha software and has not been audited. It is our goal
-  to conduct a proper security audit once we close in on a 1.0 release.
-
-- ipfs is a networked program, and may have serious undiscovered
-  vulnerabilities. It is written in Go, and we do not execute any
-  user provided data. But please point any problems out to us in a
-  github issue, or email security@ipfs.io privately.
-
-- security@ipfs.io GPG key:
-  - 4B9665FB 92636D17 7C7A86D3 50AAE8A9 59B13AF3
-  - https://pgp.mit.edu/pks/lookup?op=get&search=0x50AAE8A959B13AF3
-
-- ipfs uses encryption for all communication, but it's NOT PROVEN SECURE
-  YET! It may be totally broken. For now, the code is included to make
-  sure we benchmark our operations with encryption in mind. In the future,
-  there will be an "unsafe" mode for high performance intranet apps.
-  If this is a blocking feature for you, please contact us.
-� \ No newline at end of file diff --git a/test/test-repo/blocks/QV/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data b/test/test-repo/blocks/QV/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data deleted file mode 100644 index e69de29b..00000000 diff --git a/test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data b/test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data deleted file mode 100644 index 389e1117..00000000 --- a/test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data +++ /dev/null @@ -1,28 +0,0 @@ - -��Hello and Welcome to IPFS! - -██╗██████╗ ███████╗███████╗ -██║██╔══██╗██╔════╝██╔════╝ -██║██████╔╝█████╗ ███████╗ -██║██╔═══╝ ██╔══╝ ╚════██║ -██║██║ ██║ ███████║ -╚═╝╚═╝ ╚═╝ ╚══════╝ - -If you're seeing this, you have successfully installed -IPFS and are now interfacing with the ipfs merkledag! - - ------------------------------------------------------- -| Warning: | -| This is alpha software. Use at your own discretion! | -| Much is missing or lacking polish. There are bugs. | -| Not yet secure. Read the security notes for more. | - ------------------------------------------------------- - -Check out some of the other files in this directory: - - ./about - ./help - ./quick-start <-- usage examples - ./readme <-- this file - ./security-notes -� \ No newline at end of file diff --git a/test/test-repo/blocks/RQ/CIQGPZHNWKMUTP4X4VJYCKHWEL3A6UHYWGT2SVEOIQYOVOC2RMJBRQQ.data b/test/test-repo/blocks/RQ/CIQGPZHNWKMUTP4X4VJYCKHWEL3A6UHYWGT2SVEOIQYOVOC2RMJBRQQ.data deleted file mode 100644 index 33e7bc4fb3fef6079cf2b108b9bb511093caaf65..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 355 zcmWgA<5Ch*SgK>j#LTl(=mjz6J*Z#Z`mUeibke1%>*qt`A@ymo*6O-~wOC)CS z3K@XZPnu<5V|lBKJN>}4>2CTNRSd^0_H1qVw~ozKk4Ii%i@p$ha(-S(VseSZ2}U7Z zkd6rls>-Jxlr$AsHtpiyKm2l@?&p6zQA*^T_KfrP>FKF6gjh0Ca|$F5F$w8_lz-mv ztm*pf?kXL{+@v*+dcxh7dMNgkZw-4hS>GyUr^q%TmV(T@bO|0I6OihsZIaU*9xgd% zzqlvFWao3;lXI09PrQ6PPR{89tGbFULk#uE+PJ`Z;V&+ z1U6lID0f{Z^isTm(;-#HC$d6r&lkSDeB+G}TTyCaN^YveISwInkOs+*`)hXfd}X^9 z>#VWr`UT&2r;MVkvnp;q5j$MdKR4V;h_5&`xwI&=q*6C8za+I-Vk##W69*#zchrnE diff --git a/test/test-repo/blocks/RY/CIQKVNGIGQHPMLDVIATUPL3YPUYSLNHT5HJ2USSAZZO5GUTAVSYQRYY.data b/test/test-repo/blocks/RY/CIQKVNGIGQHPMLDVIATUPL3YPUYSLNHT5HJ2USSAZZO5GUTAVSYQRYY.data deleted file mode 100644 index 5cebb55ff3cad19c1adcbc43a2de9648cc78fc84..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10849 zcmWgA;8GG&c)a0=(~LhdBC~#fIWaSTr;7SI$0sSXC;Kd!I#aqLXh*0Jg9L-nsLIh$ z8ciXixns0s7%de?YlP9-aI`iYtqn(O!_nGsv^E^A4M%Ik(b{mdHXN-DM{C2;+HkZs z9IXvUYs1mnaI`iYtqn(O!_nGsv^E^A4M%Ik(b{mdHXN-DM{C2;+HkZs9IXvUYs1mn zaI`iYG__&mlom_2c+JFlUxXS??=;#MbToJ7(P`PK4+MoQKg`(u9aq^n08;K6y{ppKiy^cjTsS?c*%2MXdjfoYG>+7O$B& z?~72w>77RVf{y0SJUT5q^?{&}<%b!&-y>9}KY6=prsw|YPlFpZzA1g<5BRZh`O1(! 
b7lT(jqPm474uMp1adUAn3N$duG5`Sp+XpAf

diff --git a/test/test-repo/blocks/V2/CIQFIFKJWK4S7LN2N6IHF64AEZZDKWGGUVG7JNRNTYBQJ4ALX6JGV2Q.data b/test/test-repo/blocks/V2/CIQFIFKJWK4S7LN2N6IHF64AEZZDKWGGUVG7JNRNTYBQJ4ALX6JGV2Q.data
deleted file mode 100644
index c2aa971d..00000000
--- a/test/test-repo/blocks/V2/CIQFIFKJWK4S7LN2N6IHF64AEZZDKWGGUVG7JNRNTYBQJ4ALX6JGV2Q.data
+++ /dev/null
@@ -1,3 +0,0 @@
-2
-" !���}���{��o�H�Ǔ�j��A����� s} README.md�c
-
\ No newline at end of file
diff --git a/test/test-repo/blocks/V3/CIQJEFSTFKB33VFP24TN55TBQT5H7T37KCTQMVM6Z6IOJPWVPPYHV3Q.data b/test/test-repo/blocks/V3/CIQJEFSTFKB33VFP24TN55TBQT5H7T37KCTQMVM6Z6IOJPWVPPYHV3Q.data
deleted file mode 100644
index 9c9015c3..00000000
--- a/test/test-repo/blocks/V3/CIQJEFSTFKB33VFP24TN55TBQT5H7T37KCTQMVM6Z6IOJPWVPPYHV3Q.data
+++ /dev/null
@@ -1,3 +0,0 @@
-/
-" ;)�=֝ޤ\S�jX��aJ�,d��U�U8�+v�direct�T2
-" ����\�@q�buqPGw����s9c J���? recursive�U
\ No newline at end of file
diff --git a/test/test-repo/blocks/VP/CIQL7ROGT73FOVB23WYGP4F6LU5WSQKMNZHPN6IGKKQU62FWLSH7VPY.data b/test/test-repo/blocks/VP/CIQL7ROGT73FOVB23WYGP4F6LU5WSQKMNZHPN6IGKKQU62FWLSH7VPY.data
deleted file mode 100644
index 6affa52fda4e190b4e77d51639d461e4ff732adc..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 10807
zcmeI&KMO%&6vy$0Nsoa}hFd7zlskG0WKjnBvzTugtwv^Lk$6~@EW&h6N(S%4wJ4iz
ziUIG!@ICXHPUrkS%MnN0lDTY7#pyehn7tln)7fWp>w@pjN0r6cB)c`QDaukpCsSfW
zxDJwmR73rhJ%O>wPD%gIM*sU9K9#QvT+(24Wq5|DQUlBcXUNoL1lZ02^oo
A+5i9m

diff --git a/test/test-repo/blocks/WS/CIQDIIAHGSIWR6TXBONXKL3LKJHZWQMXKJO7ZJOGM6MYLW3OGOLIWSA.data b/test/test-repo/blocks/WS/CIQDIIAHGSIWR6TXBONXKL3LKJHZWQMXKJO7ZJOGM6MYLW3OGOLIWSA.data
deleted file mode 100644
index a28611f4..00000000
--- a/test/test-repo/blocks/WS/CIQDIIAHGSIWR6TXBONXKL3LKJHZWQMXKJO7ZJOGM6MYLW3OGOLIWSA.data
+++ /dev/null
@@ -1,3 +0,0 @@
-/
-" ;)�=֝ޤ\S�jX��aJ�,d��U�U8�+v�direct�T2
-" ��Ɵ�WT:ݰg�];iALnN��R�Oh�\��� recursive�T
\ No newline at end of file
diff --git a/test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data b/test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data
deleted file mode 100644
index 9553a942..00000000
--- a/test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
\ No newline at end of file
diff --git a/test/test-repo/blocks/_README b/test/test-repo/blocks/_README
deleted file mode 100644
index 23cb0909..00000000
--- a/test/test-repo/blocks/_README
+++ /dev/null
@@ -1,30 +0,0 @@
-This is a repository of IPLD objects. Each IPLD object is in a single file,
-named <base32 encoding of cid>.data. Where <base32 encoding of cid> is the
-"base32" encoding of the CID (as specified in
-https://github.com/multiformats/multibase) without the 'B' prefix.
-All the object files are placed in a tree of directories, based on a
-function of the CID. This is a form of sharding similar to
-the objects directory in git repositories. Previously, we used
-prefixes, we now use the next-to-last two characters.
-
-    func NextToLast(base32cid string) string {
-        nextToLastLen := 2
-        offset := len(base32cid) - nextToLastLen - 1
-        return base32cid[offset : offset+nextToLastLen]
-    }
-
-For example, an object with a base58 CIDv1 of
-
-    zb2rhYSxw4ZjuzgCnWSt19Q94ERaeFhu9uSqRgjSdx9bsgM6f
-
-has a base32 CIDv1 of
-
-    BAFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA
-
-and will be placed at
-
-    SC/AFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA.data
-
-with 'SC' being the next-to-last two characters and the 'B' at the
-beginning of the CIDv1 string is the multibase prefix that is not
-stored in the filename.
diff --git a/test/test-repo/config b/test/test-repo/config
deleted file mode 100644
index 5a9765f5..00000000
--- a/test/test-repo/config
+++ /dev/null
@@ -1,99 +0,0 @@
-{
-  "Identity": {
-    "PeerID": "QmQJPPKEd1a1zLrsDzmzKMnbkkNNmCziUMsXvvkLbjPg1c",
-    "PrivKey": "CAASqQkwggSlAgEAAoIBAQC6rWI46zRrseaaQq94Y14LJczstp0boYRr7vzg3jugJOqGhqzD+t88vmelYuyI0bJkoMI8WkfSBT0OYbmwszgzNFvYvZNPvfgzt2qLODU1/3PP0ihYcO42yAoX3KWlExuX1xi/nNaUvMPGF4oMucp6PN2o/A2uAv0jsnAeb8hJM4MNs5aQoI5gby2yxtM9mc78UkdV1MZ00IvOdrU2IViNwTVre/PwjbaR4RPcF40+/E2zVJG/z8wOMnpkKAkIxCSjJcaRcxR1bAfP/cKLbpxrpnQlHgVEBVhIaqkd3i5i1+dG8VgsD5LZPh/hLjldXiSYrNMO70Ksne6sc7nL/3UhAgMBAAECggEBAKqKe5FETz+T5vhGs8doACJvBie7LQxxImj4jE1iuVY0Y41Cu9LO/eKgkE+ncDAOYstLkidQ0wwrfPwGi33CPTWKP95iUpInGvkkN1G4p+QM2+CgPfuOMBIb7hyelzWXnz24ZAOpZN+9E52FI7k8gp3pvRcELfsq/9f8zDECLhewRjZitiY5ewfBKsK5LFKQSRg8DIVIKq4iqi7QMRRwbFDtDLcUHJepXSTRhmhWr5b/23O8OxnHExtjXMFqtBzvaAuZPnw2whr8ujV3W+5PyY+btx6M0k/dDQe2dFSJWm8AKLF8AL+houl2vtnFQ47yeYisWd02BcR91DyF5u6hxYECgYEA8b1UlUgCJ1h4MLTt0HOk+JwccJtyLoofe/y8/i4sMtjUZ4kiyIlEUrmPaE3qcNsUOG5eldz4nfN4AYRG4UQwmMFehpGC5pxH/kW8xwC9iFxDbkrNwJ8T5Jc8EQmFZ9BTXviQ3d5ViO06gtOiAdf2L/ci/qazCR7eowdjvbUso0MCgYEAxbCFZjj0L7+Zuyl6Znv7A1raJUKTGR8J5OsP6lIcRq0EfC6OcoCS6bx7FIbM0gkgGj+F/1l1bonfCcCDv5x4tRZzVUCz2KrDEwo/RCv1Y47ipuusao8fTTlO5tgESl4jAvaD1mfZom9BslO4sk3CxXJyAuMJUCc/8To6HLPclcsCgYEAwcuRknd+2oIPIKT7fcAoAfJMzm2f1PWAFgrgjDkBz+WKKlKDIxcAQf1mnSzRhJXtGOxKQYQ7f4zeqQCdPhxHhT6IBAOFWFDzWkIX+8V5LGci27l2RzSAYyJm0hW68CXPoHRO1r9V/QaJgLYey7GROJS8Zj/HiclInJPg/wXOejcCgYBjiwcg+fy7LldSjwg7IqK6hbzilitLsv1gb5yL+NyUGfrwLOE9dtCDMY0oQNhRypaHoQTwFzOyfWn5lx7AFSISzUM14mas6w9fPwGsahYeE4y9UF55KagxUnIQeyVt7QjkLQ0loRVNXYhPKykNX2p70aznFztRSPJBnXg1i7u/EQKBgQC8iZQF/5vwgVN3FgEugru9DKUOWAXMXWcMENRgQ9kiUNU7cwqszQyOk/zmNXrrIbdzn+4/H4gBNRw+dy0t1D5nG7+AuMn1J9WmRS0NF6eF43ttRRmERxiYwRssBs0JLaQWaobmEqcMNCygm1BCqQrKfmY2oI3HDOukMwgMpfZsSQ=="
-  },
-  "Datastore": {
-    "Type": "leveldb",
-    "Path": "/Users/pedroteixeira/.ipfs/datastore",
-    "StorageMax": "10GB",
-    "StorageGCWatermark": 90,
-    "GCPeriod": "1h",
-    "Params": null,
-    "NoSync": false,
-    "HashOnRead": false,
-    "BloomFilterSize": 0
-  },
-  "Addresses": {
-    "Swarm": [
-      "/ip4/0.0.0.0/tcp/4001",
-      "/ip6/::/tcp/4001"
-    ],
-    "API": "/ip4/127.0.0.1/tcp/5001",
-    "Gateway": "/ip4/127.0.0.1/tcp/8080"
-  },
-  "Mounts": {
-    "IPFS": "/ipfs",
-    "IPNS": "/ipns",
-    "FuseAllowOther": false
-  },
-  "Discovery": {
-    "MDNS": {
-      "Enabled": true,
-      "Interval": 10
-    }
-  },
-  "Ipns": {
-    "RepublishPeriod": "",
-    "RecordLifetime": "",
-    "ResolveCacheSize": 128
-  },
-  "Bootstrap": [
-    "/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ",
-    "/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z",
-    "/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM",
-    "/ip4/162.243.248.213/tcp/4001/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm",
"/ip4/128.199.219.111/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu", - "/ip4/104.236.76.40/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", - "/ip4/178.62.158.247/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd", - "/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3", - "/ip4/104.236.151.122/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx", - "/ip6/2604:a880:1:20::1f9:9001/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z", - "/ip6/2604:a880:1:20::203:d001/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM", - "/ip6/2604:a880:0:1010::23:d001/tcp/4001/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm", - "/ip6/2400:6180:0:d0::151:6001/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu", - "/ip6/2604:a880:800:10::4a:5001/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", - "/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd", - "/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3", - "/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx" - ], - "Tour": { - "Last": "" - }, - "Gateway": { - "HTTPHeaders": { - "Access-Control-Allow-Headers": [ - "X-Requested-With", - "Range" - ], - "Access-Control-Allow-Methods": [ - "GET" - ], - "Access-Control-Allow-Origin": [ - "*" - ] - }, - "RootRedirect": "", - "Writable": false, - "PathPrefixes": [] - }, - "SupernodeRouting": { - "Servers": null - }, - "API": { - "HTTPHeaders": null - }, - "Swarm": { - "AddrFilters": null, - "DisableBandwidthMetrics": false, - "DisableNatPortMap": false - }, - "Reprovider": { - "Interval": "12h" - }, - "Experimental": { - "FilestoreEnabled": false, - "ShardingEnabled": false, - "Libp2pStreamMounting": false - } -} \ No newline at end of file diff --git a/test/test-repo/datastore/000051.log b/test/test-repo/datastore/000051.log deleted file mode 100644 index 6591370d27190ca44de541a969165001623d07fa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 30167 zcmd6wdH5eyeeaF7)}P+q+j?8u+N)J+Z(Hk9XF1dLwlinWoH=J^&YW4#%&2W|Ew)ly zTeaTWR?AMg;o#k^r>w9jf4>`7Y-IDjdZt>R={I=vRAM|793ODfT-h1;Kmwf$}&;H3VZ(aE8 zj-UO^=l=I6|7pj!@9@@ly<>;_PrC0Cb$SibBS9Q)UtKs@M|Lw1ucL znRj(Z(yFLKM!Puk?7WPHR$j%jP$)6}e=3l=Z_|hrc2e|FIyf5t)avZPWpU}IePg;o z^4eiZB6Q!CK~XGx<+9t>-EhY8wZHxF=a({oy>9|6Nod9msjRl{7-x~~he?|A$|(sU zJqhwOslB>Wv7z~S`uKrA zu;rh-)lRS`Y+SFXqB^ODfaHU3r?DOQgxQ(z=LHKa&u$a1=?P_q5 zX#9ac_1+a%0qlblU}>hMPIbdX891`BCS65F>z)p>$e+UkoycUe=)v=9`>CYO~oTHA%&FYJ2rT~}OkCcu^#8-D>{ zzdHe@Et*u7c^RyIsVbeUbN}kh-G%Skpswuj@@JFy<=zFhsvp7u# zK6KiubnCZm)bPm3hEfN;%|+f?3AF~xk$+%71hy}*vIVnxr zxK(6YtCe-3um?GJhDd9xSXgoJ@f#033t)$w`-Y_evnIe;DtUrMmz_poKp&a7bigD7^byqj8Yq_Eil`>9MTV>xj#O|EI_F7Wfk#+=r z_nMS;el8YHTm7SN95#ae!N=bJHh|d^U`dh&#uq^`;+CmDR0$>A#StqOmfybnvGwz3L-n>^kLkDR%i`f*;&4#ZLa#cZ{;PIUU}>K z5$u57-nj{2WCAR65|L)oufvRZLz3r3+*N^SQzuJ>-x20vO_C;(ee0EJ-nVf$J7Xbj z>elz$tdS&?;8K{RuFg z<$c!kq#|JnOBF|+$)dux8m7-X5_m;h6qpAE_s(SUtq zTE~i&8Pg=qb>~?fyaKLBM%zYE(Q6snaX35tbDgnu8!x|b2KMpece);6e>efgd)Toa z*SqXP$^0_yL}!Pk$htHf@=UZ%TUDWgN0lkn))m>7!~cWluetm7amD`I>W{q#V1G0L z#tg{=J1xS5rMYrRC`wf&kv6QgV@_fC)5t_^K^&drMQW;Q(Og8#w23QK?5BB#XFsH= zbhsP@87l*j4X0b3CE7%%H zgHUINly%&bj)tt(v_u#YIuXx;rp`U9jNA|}{h}S4>x^x9=DQ~yK7t+m=Ho|y%%1}D z>m>4dS8~;dSBYE>-%hW#$qe2`XLEYN@1E?iiK6g z41QdMA{TZBuOPL2X|XP-LI+Q*sEdWWRvz*2u_M?Y-Twg&uwViVH_|J8DY?VDxUEU= 
zI=;&Bf;vGdMCB2;al@d6cMuP_fJ!9gY%3WnrIbkQPX=3=hHzj4LWyPrOSUB5uy53q0oY`d=5%0t&|x^e`2M89Vlz}N(s zC|ftJhe2UO;v~%CBrYS*tSi~$HOl-v&fzN^0Z}i9JI{fF{@X#XIr|O z?Mom3A;99qtHz!Z~ShGqGLmfM$v31_HMX9ynz6b-K%?#{Zw{qou zSM71r2)6#PkBlB{JOP$fm0^KPvc6Sbz+u@lK{Hwgeo)YoMAW5GoDQw$!0Wd3Afjyu zMjl(adgZzi?7rm6-#+AZOT?C7U)Um9<3~x!L};_Ts{7j3odcGvbOXC&`9L&vO(che z5LvY?t0JEEffclNl#Ry>;bL9rgwi;%S%OD`MGBkW*`6M>KlFnc3YjOmBiwSrhUu~p z3y0qF)Qk6wY)9|)cYlaQlUuTFoxVs*gJ@CoaTp|?A2}BFV2Wx*)DR~4TV2A0q48k# zwXwsgra7KrP)T_>r}`??P~dFtibCE(GCRpjY9dEN9w^U)1eThJ78675s9jflYZf>q+l_0xX>X zql9EiM-hVURJ7Phcvmd&arW(AbBj4K!b;mk7O>MLj$hd=zTC|1>>VaB;(RtuT<8H8ydoxoumrJ(s_EpcG zG=g38b0_~4z|;g-6k3r(qEv(l$=m_yBe;jkEvr-^&LAn0BmPh-TiOz5yLWjv%)sV) zE>FJn>>r&yf*rf?;->+YPk?2PVjcWr(Md0~0>`E*2`WT#yzP*yQ9~IRB*?Uv8QLYr zwYKH(uiO8$C$1gAK6*&=D*)3|V8K9jIkY4YaX|#Ok2#eqU~q#3K2R1btMrI&Gsm`t z6I!w3&VTq{IR4o6caLD+nny#JvSLfHSDLaQtUN!Xv2qYkN2wJi6^nX@k22{(QBg^_LjDe~P|v~o+ftxQ=)E#x~5JXz#(R3bAcuBp=~ zm2|ytbYXi*QRTjsOjW%{Ji?Q*ti(feFe1LuO+Rv{ASTzlXgq>UTNsFv- zUYCBB+KiMmY-QnxRn3~btOxC-U8=G)4um!HW9I_f?}qCRzGMV@{hvoK0<4|@C zGL@#5wYD0cxpnPkB5?*MhVQRLCvDGAD|(67aF>S~ywj!PxbNInm-)&Sm@`dU)0CXI}uY zPo)pP7hwGaSftVpt8B^Ibgq-S)b$*Zv9y*P>6TiTR@&u3Un>*Q%F0C7Ek>7ft=J2z z?|*RR2)5y8-?R!~!xR`tg3PlKgv7aJk+7``r|#WC4izhTkULhM2U%E!QSA)K^9_ld z*)a-J+;L9)Hf(v73pc5H;(27~kbB8>-=|d+mM9wv*`@g9Yp-8;VDAf$xnu-8?Nq)J zU>}|U+pb&r{NCT&{hK4$okxH8g8=);1elC?S;U3oBGykc56P6s;*s0Ljn8>Z34Yd? zv^}!;X_19ak&1MFJ9h9>r|$OR2)5%JzCWh#J~{#BBQb!KMTJ0wNm55Lbb3bxCZJVB zc!f8LNFFCHT%gboxl{0b@kugXp&F~G-6*oTBqh-eZgGa(pWCKo>2_gH!mJ{u74U0w zllNlbp7Zbh!M+y)>_a>K#uxC8|JjycuYAXO;28dF`j?mhShhDU^{&BZTtW7k|lq!8SM4=eaTi}ab-I(N(o4WA?^y`sa8ST zAmZ{7QDU*$I!4NoxruIys#R?){h5uO%fWu&{Hq_lX9U}tV!JNSRbSoZ*2hMQRX2Y7 z0qomfPVAeHZ4c9G=ruB57R?G)4<#x`trsKdC5OE7?YgVVJdWJFYyyGja^`={bJ!0( z|M4Ge8o}O@uN`pMKei>Iwo^jMlu>BW(^DbDlzuJ;*t23pmlC+g922XS-%nR2>>jp)oIB7_Ycslfn z%w?4IS`btWQ9t$<5Br$+ExB$~RAsG$H=Tst(3-Ad)QgFh64kcQ^nNY6#7=|6*Z#tB zm!I_HIV0HLtlcRF*p5?RG3v=hnhqUB{;4R8)g#hW-w@ z*w--EVSn_@bt|tL!Itju;xAw~KfWc{D|a(QfkxWOMwXhJQm0OX>RQKH+_OF~E$WYU zS_e)Zd7a-b4b$-C2QBs-|(3q*21T&SgQO29SnAo<%@uN7aNO!s@B4`_&tp#>ci>YaMYGS{XW-U-a?6wbDhFuHGh~I=8&pve zV$-HXC2P>Mhl?eZ8`T79%#`phjlv@4Xj);5sq#5)-3gE1dG>x6f$hMX-~8{mb$_=d z+g4lW_u6$jVTB&yNL@N%Sm;!cCXDlp5YG&eg^CQqq9U&}7ZnPovxf&^7g{dN;HGg_ zv&1tV@e{kR#h^-31brCk97W&2cXQ?$Smdw+x)NZ2KLM7}g!*Wv zvlMk>JM>!H&@gh)fQK%lxI>c39txABwL7?Z+{B&g7hlAAa+s^uuej>45$wa8&N~cX zpPB?q6H_##ianV+u^Z^ZX;S6+%7?$)%aWoj#gXS9VpgF-Mx%6oup3{z`LUBnuy=p% z-5&Yo~TyX+@I3uiU|*QlRXJmVmZZ966^>IMfcMGB%>LnTsWuLQS+a zh`ix@sHSW}rphGhk^pU7DkKQ=c-tpP35ix^Eg~X+;lLNy?|$kCw&FeS`vk!LaRN*R zMQG(DaVVE9rL|OzhE<45r%GaY(7>w$6dm%6=Jt@MCgrmgo9j6|{DQlWzi9+x@nmROok(dCbfv(C4D;CzS+;HL@BiPS8ymlkNJ~IJEb(j>A z+64*oI?6&j$W%dzFf~&4wn%CVK22*kMj5|lNY+K=%%Em@pnNMIvAX~vlP3};BN9lo zS)xI&6iBuAJkTogEmS!>-ZlNg{cD%s{DTqfn7;@<4zSNofMpa#e5fZ!hgNiyA*dLO95r*^13|#;!W}xD)oe7+~$~yFLoA&rN_)&Fm8GdzMd8Y_c~4t^nBonE(q-qJy|g zrQNki#>v6ADm01aoK_u)$_ibZ=qfC9n;`EyIBkcm8lR-OR&3|p_Beh0R{?hMRUf?! 
zVE;S;M$6C}N;gLd**HZT7Tp-U%BEI@YEvYtVKicbuBiO0K=Z^%~=O@6JR4Oe__vK_qgV<5p3C=zjG+S{$&CTsgh9B7%fB`4WXK9 zk+|F<+V*hhe1}-2Q(78jh6aDQ2(7MK%oF<5phQ2Q%}(d3^3kKo%92~1TeKu^QAe$4 zWBb&14AI)<20ye|*!8$uuY6_%d*rNteILO7bqXvbB(}3Os|vT_DUa$pt6a+tI%A2U zsnGr%5ld6$Csh?kc-t47J%r&{$~dW0R<=XuhFzB?tfYexF)eED_eGBqYaLSbSmuc- zL#)`+$B()F`4Q~ePk$l-*uPDIxrCdfE5arVL%SUE*f%AybD}EOZcAHuHuBQM1b*o_ zPFrIwW`}>S6cuv{xxg-7efAA2Mz9@TeCcdBv0vO0?3GR|D{Uo9(YOeaSxkb&a2GZ1 zmXV^CEQnkbz?IFi5-r;;io_k#E#vv0E4lR04SPLv;mG#HS^I86g!b>7+5UqFZM(8< zIP=jft{mC6j?jYIQJzM%ad5@hq}=c%N-R$>+cpW8Dl}Hz+i4Z3!p2lbKDN`%ZBvig zboKKWj}(`E?!a$g_jdVDw0j4C|EP1nGqOE%$_L+r-P?7`-P4W)mD@AnRHaBRl$Tpw%c^2*D z=T@A2?g+N;jnz2#v2+3~V4(0&MRIFdnCz3!Ph}Kr@)9;%LxCmsK(^F;O&Bs54fMNKxO( zEOZu?@K)!4c-E}hW8W7?u==5&-wBDYeKv#59-ghXGA*M(#&+aH=#HT1hO`2^m1qz} zcAViY#6W;fe1W(+4gDZNcA%cE*j#7q*?o^+dF(2HZT;$O*X23<=vAu^8Yw<`=l71n zzU?=$Zy5`F-?Er=J>Imk^CC;4En1+iMP$i)~Qo%3q@wp~xp zV>jNt^oo(2Fq~OKNg~Uiz+L1^(#VIl4#Eg&XN{cf52`O3CDlwPww!(@d~ z$$(}ducv8FbUjp9noU4G|DSDiV6SqmS131G`7z!GK!xgW4T zRV@pO{L92?(op;Bw?~Pz@{>U$m%^HVHfQ1>}I9Cu@7wzR~Y|5la zLJ~4D2$WfnUkfpA)l|uVcATWFG5#`pZ{|DvFCBB@LnGLV4ZraW4C?7e`MofBiP!TZhZ&9j+g*TTdzUea1j&CuDTk%GKlDy?21%5bdUr3dsM}3JIn%y zV3NYG7pHOHSw(#=vlIhe-cWZ@uG+#>2Dy@<4$@d-1|RcTO^O*l(RuxgP`-RsmQXKQ#2R6NP$&#a|S12T2godl2(@pK!J&`7i zOjFxfA?3szON#E@njb#s!0ZER|2hotd<90uWO)=P9_^g{bcd}A;tPVH!B$rT@Dlr%wL zBW}b1OlaKE-;`m1Dv#GQrX1pXy!RRH5gS?BLNn|w-?{Ce(?_ts|J_U9hVy;gmb#6fT{)GojaAeaM5CjBJm8;+K90$Lho_*|yp})aD{oB?{%z1h)=uCi-Sz z0Bem9w5F7`1I*D74OLj@N#X`5bBgfg*BpTzeE0Ixzdx?nt6aqG zde|Sn`)dzw7(s3!zkW5g?kf{pr~EicP&Z||>ST@409|4#1go1)3-JAB^bv3}g`1nO10@xW7U}#5B zXa$M#7WUDn?a(4q+cqSteLrbYbF_0KGYTOmsx2oNDon48D>j!ccw+e@Pb?k5-hSS? zrvP^51lV@91yA3z=cNaaU`M`w@3Fh#tO>Bp)eX8Iy)<{IB15YN>`}^k)g)<(qNxdL zui{Q;8XdH;g`E60^qX%uVEryfj9_oMW{1%>oIL@CnJi$1ejMfrz^@-OuGsIs?fpjp?A!@3m18WL zqbCS1Y0Ya7;}=E^5@8I6y@gE&&nShVnh;%nyeqVWUhkQ?7;s&qCPfoTEdp6f3Z0=) z7I6=cmPicC=xT5g`s;xQ+$-ydV&3BD3%|2s1iS1L*Y6B#dfsNRpZG(#bk7$KyLbfK zI^5c>kImC-HlBXkrM;qdlqam1UxNq4D zFKv8j1iSK_>{@_*bpkB6GpNQ1N`fMWuZPwySa_H%KgDF8#GG=E(a_Y(Xkh0^8rC&k zOk~ehZM)*YLvLSp1;9>7&KR}1E|~y>f}#qc9wRjr_W~8#c7PEEs61Fyqj-W;RD*dE z&uT10P8{MB_G0RPF0ikBW94^eVDH`cUoQvPr4wK|Y&fJG1H&wBM|ElxqSFD&Rc)CK zn4iOpN6_SC@St>~J)a1&7!J<0mFvzq@ti})75k2q%$!QT7TPaO!b z%csEVIA9%8$7+E4%;ClN=pe;tpKQ@SVRxuG(j>oY`m3o#>_4;G( z7{L~H_~K6kZ1n^fg`6XlQe%wxwiGkko{ngnpnBscWz@uOMAHo7S|-6*cOQ>JE{nC) zsLkTc%?!$}NCfmyudq0D*qot-zL>Fdj8EmN@G#oxK|nGbCqavaJJ!8)#QG8J^c~-+ z0CvR$7&gl$%58?aLLC#`AX#18=vV6?ye=`Hv*CxO+=u~D3iXG7RfyOVIKt+?IWLmWnZ?jA>E7I9@OUs%xF1JzG ztY%v|*SO4{N8NPS(h==?q*qRA2$)c)`lEAjx2muasM$kKjLE6}>Apk+^B5R@J zL{O>Mrq$?1YVG+eg%9uZ-R~_Q!9H@`yGN(_$_X%y&icFxvy4G#2T4(A)X9otniR=X zWNctPRN&N(-EwN%ULk`lTTIemBYQ}I80`gpNipc)+X~{gkUmHQc(E2MRjCb%pcE3T zQK2D+u(01DhwpdP2zJ3c{&CcQyJ`Z=hb(KQ1B}_mHVk}SR*h4kXrZb!k6LK$^bD#p zl?Z(|tOe6}D`(!tTn6@zlPIn*kYQR1NhgyTi?obz@$8Zsdg1j=XU72=?=zfJaAhe;8^|VmwLqSNhdG2@b39$>nI1e$1;3oMdF{7y?WU71yKV&A+MD05%d_<4 zO&4E3Qe67g?_G=Kxo%>4f`+7a5!;r_v^!R~EJ_p9sUW?j@G?r*LFONFR<+Y%*cLOd zakzM(=8=*;8R`m+5Sj@IXZ1jxdk3ZqSH2^#3klmG1AwptHj+@Xv9xfHTUen_jLGZ>8ICAG#jM9h^V z>6OmFcw?%NXHZ@n;=zGZuf5-{T;9b(c$lFJU*=^e?9%Ui*``>zN1Kbi)YtHofW>0rss4u&9op?9M_| zpy3YLR)FEYv;%|Fm{x^K7!l>mnkIZPyWtJJ3lyr`R}~sYdGoOo;c~DsoEU! 
z!xXvgIx$YEWfSGlB}3zyW(qy)g%Ujz1*}+U7f>_Y1_8G<2kpJr`VnmFlfGS-=b@)= zd3r`+?I_2{=C+A_8x>VC;%T+U>pPF8afJq=LFI|#c_km*3KCwfas`Ic5Tim+J94(Jx$4YQzW>CyJU`v6u(5BqPwX2+|4eF|8rn{fQS2Jo;91sap-{?hQUcxQL9kN;ht71$I+rnBd!%56=cf4Hg;zKN@tf^B7#~Ef?Cp8y9(4crN3fkv`%Q%db=Sm!3cbWh3te*J)f8t7>5396-_l_arz5$6T5j({ zbc*VV5op0$Xm}n?=UjQBd%tt&73)W^tqs9;U7pic-G1|ik>a_x-}}qhw|ge`4SH*F z0UhYtVl>ND+&X?jd7kw6pi$X*2H)*KD??vvyR5wG0QH1*0g;!lBQdzVZ1H%=h*h1+u<70mkZ(p!gT$GVscl z4Wz4)41!nGL8dFJeTIG%eEfkAVQ5<3BZ0Ii*gn^Rdf|>QAN7Anu&+G!v9WXK-U%>n zYvVFs`pD_kLqaj*V!T162A{_WLVTBFz}ttpDd-*X2nwOzw)jx1)}Ma$3nSR-wzv}~ z{l3j$ujbrs*A@H0_a8ia+0_8s`lQ2YRaI}}BPia0tT43U(LjxJ)RP3Cp&>-!TRBdL zabQc-kSrZ^ycjLawQuL#bJ(%Fj1+(DJ$V3@=X(>&6G02Xg|;AuzIhGtg2v7W)aNY} zo$9_wq5zVmkff{%jK`7?BGmYr)!c?)!-My3I`AjHJXf6g=-!8q6swZs#_aHe6UUlD zeOJP7wD6&lPH+nOH?%EcfFG*cne56YjvR~#vJi9q3Mp%Rduo=so2%S?#+morbMy$d z(_bC_#$%Q&d1y1(8}R$q2TI`Hi^4Gyu5aifwxeiZa7Pia)bu$YloQ&GWvN~!Q8-v} zgR0@?eVZdecE`C_ANOkewp~xp@_nBC(J8M+v0c+WJKwU`veQNi^rD`H%e8*ua%s%M z`*!R$Hl#a?uyVBJTWP?d_?RNXtXM0Q8}~A^8UY~y)jNxQ^K;$$m8%Z8|GZaQp6wb~ zp0mfQqc0gLKDFeZ2VAa4CN7ucxspgfBb6HWC1fr_e5b}LnO)N;=N7ceB1>@#YR*+2 zAzRV6&f+UO&u3ANI_}Y{R*hiqc;g@X0DE)-ENE1lpw`H8g6Ce^P^s|{cwwxDWm)G{ zkgKSSpJ$!Hr`=RM<_um=p}h9Xb?+h1pM2v8mVbKFg*Z?@oH$Utu<^mivU5`2xINm` zdpBYDR+@rhUn_G$&?>(6)}po(3pPt*1z^peC_HrN8b5r2Wq=6&&Hikz2xqZ z;`dH|mSNu>pV+qu8qJtCmV?(j+=_<*>M%XNVX_4^UaiQG-?GK}B_=TqKo7Q#`d=iK`Xd4Vgg^cIooJxWZt5q$B zRAm_9>j#kL>MI|grYZ=WEv-VO?J;NO;EM}rT1tIbd$DlMhHriAu@UU^U)gyS?tF3r z3DkTu+r*O0BY~3owE99h!Rk^L_cSVFRDX||rOt1Z< NU$J7>n=Tr`{vUmNRLTGV diff --git a/test/test-repo/datastore/000053.ldb b/test/test-repo/datastore/000053.ldb deleted file mode 100644 index ee8b8a5046761153c9ee35b25cc3a3d95d6238e0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1167 zcmZSZT*s{MW*QpdXdDvaX5!`S=;7sT;vH<}9O&T|6&mOlX8#6YG{jAQvk-@Hx{=sgMVQwaFj{ZI&UXI4j9wt5> z?qR+zMxKG@E~X(q9#I}H{>H8$KEaOWVZNR&X2E8RY|uzo(=W&-CdyYcaWDuiV=`!B zR5IXYjf&W+S=9}{&nAGU;5IdHysx@r7SpP6Xkx1)s`=D=Z4J|#wO7>_D=TS z`@?v9R=0(z>Hp&Mmoy>@-kF`?62G%_sj&3)>k|9tT${4z@Gx%?BJ#?8E&r%hPUmyoZ!>DXo4ndkllxrbgkR&t^HT-jDL#faX6rrFh>KlE;! 
z_)z$cc(2_b-_0Qt_n$w*XH=DHnpQOw5c7L`5K@xKd7j za$=5tT4qjaaZ!GL38MxiPJ|R9r?gnI#cL+c`y$kEdZ*F8prg4nk50=@eIO`g`C-QH z_v}1M1(|upjG{0Nlf;6xn)hB=e_id~x5SoT_2=sYma~P L1·0 S·1KiB Q·14 -15:41:22.550450 table@build created L1@14 N·4 S·1KiB "/F5..7JP,v5":"/lo..ins,v12" -15:41:22.550488 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -15:41:22.550730 table@compaction committed F-3 S-633B Ke·0 D·6 T·918.111µs -15:41:22.550806 table@remove removed @11 -15:41:22.550862 table@remove removed @8 -15:41:22.550932 table@remove removed @5 -15:41:22.551030 table@remove removed @2 -15:41:22.551680 db@close closing -15:41:22.551796 db@close done T·112.63µs -=============== Jul 4, 2017 (WEST) =============== -15:41:39.052965 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:41:39.053171 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -15:41:39.053188 db@open opening -15:41:39.053255 journal@recovery F·1 -15:41:39.053684 journal@recovery recovering @12 -15:41:39.056117 memdb@flush created L0@15 N·2 S·211B "/lo..oot,v16":"/lo..ins,v15" -15:41:39.056892 version@stat F·[1 1] S·1KiB[211B 1KiB] Sc·[0.25 0.00] -15:41:39.058262 db@janitor F·4 G·0 -15:41:39.058279 db@open done T·5.082794ms -15:41:39.061267 db@close closing -15:41:39.061388 db@close done T·118.126µs -=============== Jul 4, 2017 (WEST) =============== -15:41:48.623766 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:41:48.623946 version@stat F·[1 1] S·1KiB[211B 1KiB] Sc·[0.25 0.00] -15:41:48.623957 db@open opening -15:41:48.623999 journal@recovery F·1 -15:41:48.624407 journal@recovery recovering @16 -15:41:48.626176 memdb@flush created L0@18 N·2 S·211B "/lo..oot,v19":"/lo..ins,v18" -15:41:48.627325 version@stat F·[2 1] S·1KiB[422B 1KiB] Sc·[0.50 0.00] -15:41:48.627922 db@janitor F·5 G·0 -15:41:48.627942 db@open done T·3.97763ms -15:41:48.629261 db@close closing -15:41:48.629399 db@close done T·136.301µs -=============== Jul 4, 2017 (WEST) =============== -15:53:08.378238 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:53:08.378408 version@stat F·[2 1] S·1KiB[422B 1KiB] Sc·[0.50 0.00] -15:53:08.378420 db@open opening -15:53:08.378468 journal@recovery F·1 -15:53:08.378990 journal@recovery recovering @19 -15:53:08.380800 memdb@flush created L0@21 N·2 S·211B "/lo..oot,v22":"/lo..ins,v21" -15:53:08.381531 version@stat F·[3 1] S·1KiB[633B 1KiB] Sc·[0.75 0.00] -15:53:08.382277 db@janitor F·6 G·0 -15:53:08.382295 db@open done T·3.868457ms -15:53:08.383570 db@close closing -15:53:08.383702 db@close done T·130.606µs -=============== Jul 4, 2017 (WEST) =============== -15:54:14.936403 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:54:14.936572 version@stat F·[3 1] S·1KiB[633B 1KiB] Sc·[0.75 0.00] -15:54:14.936584 db@open opening -15:54:14.936635 journal@recovery F·1 -15:54:14.936913 journal@recovery recovering @22 -15:54:14.938940 memdb@flush created L0@24 N·1 S·162B "/lo..oot,v24":"/lo..oot,v24" -15:54:14.939787 version@stat F·[4 1] S·1KiB[795B 1KiB] Sc·[1.00 0.00] -15:54:14.941728 db@janitor F·7 G·0 -15:54:14.941804 db@open done T·5.208439ms -15:54:14.941863 table@compaction L0·4 -> L1·1 S·1KiB Q·25 -15:54:14.942716 table@build created L1@27 N·4 S·1KiB "/F5..7JP,v5":"/lo..ins,v21" -15:54:14.942754 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] 
-15:54:14.943032 table@compaction committed F-4 S-795B Ke·0 D·7 T·1.128534ms -15:54:14.943205 table@remove removed @24 -15:54:14.943295 table@remove removed @21 -15:54:14.943640 table@remove removed @18 -15:54:14.943818 table@remove removed @15 -15:54:14.943942 table@remove removed @14 -15:54:14.944068 db@close closing -15:54:14.944171 db@close done T·100.355µs -=============== Jul 4, 2017 (WEST) =============== -15:54:32.988001 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:54:32.988316 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -15:54:32.988336 db@open opening -15:54:32.988423 journal@recovery F·1 -15:54:32.988947 journal@recovery recovering @25 -15:54:32.991002 memdb@flush created L0@28 N·1 S·162B "/lo..oot,v26":"/lo..oot,v26" -15:54:32.991603 version@stat F·[1 1] S·1KiB[162B 1KiB] Sc·[0.25 0.00] -15:54:32.992495 db@janitor F·4 G·0 -15:54:32.992515 db@open done T·4.171213ms -15:54:32.993870 db@close closing -15:54:32.993989 db@close done T·114.28µs -=============== Jul 4, 2017 (WEST) =============== -15:54:50.017866 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:54:50.018029 version@stat F·[1 1] S·1KiB[162B 1KiB] Sc·[0.25 0.00] -15:54:50.018041 db@open opening -15:54:50.018085 journal@recovery F·1 -15:54:50.018498 journal@recovery recovering @29 -15:54:50.020254 memdb@flush created L0@31 N·1 S·162B "/lo..oot,v28":"/lo..oot,v28" -15:54:50.020663 version@stat F·[2 1] S·1KiB[324B 1KiB] Sc·[0.50 0.00] -15:54:50.021342 db@janitor F·5 G·0 -15:54:50.021365 db@open done T·3.314444ms -15:54:50.022280 db@close closing -15:54:50.022484 db@close done T·186.342µs -=============== Jul 4, 2017 (WEST) =============== -15:56:06.923937 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:56:06.924230 version@stat F·[2 1] S·1KiB[324B 1KiB] Sc·[0.50 0.00] -15:56:06.924251 db@open opening -15:56:06.924325 journal@recovery F·1 -15:56:06.924883 journal@recovery recovering @32 -15:56:06.926702 memdb@flush created L0@34 N·1 S·162B "/lo..oot,v30":"/lo..oot,v30" -15:56:06.927201 version@stat F·[3 1] S·1KiB[486B 1KiB] Sc·[0.75 0.00] -15:56:06.928181 db@janitor F·6 G·0 -15:56:06.928221 db@open done T·3.959635ms -15:56:06.929783 db@close closing -15:56:06.929949 db@close done T·163.655µs -=============== Jul 4, 2017 (WEST) =============== -15:56:23.797050 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:56:23.797250 version@stat F·[3 1] S·1KiB[486B 1KiB] Sc·[0.75 0.00] -15:56:23.797260 db@open opening -15:56:23.797307 journal@recovery F·1 -15:56:23.797703 journal@recovery recovering @35 -15:56:23.799545 memdb@flush created L0@37 N·1 S·162B "/lo..oot,v32":"/lo..oot,v32" -15:56:23.800045 version@stat F·[4 1] S·1KiB[648B 1KiB] Sc·[1.00 0.00] -15:56:23.800988 db@janitor F·7 G·0 -15:56:23.801011 db@open done T·3.743272ms -15:56:23.801104 table@compaction L0·4 -> L1·1 S·1KiB Q·33 -15:56:23.801542 table@build created L1@40 N·4 S·1KiB "/F5..7JP,v5":"/lo..ins,v21" -15:56:23.801571 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -15:56:23.801657 table@compaction committed F-4 S-648B Ke·0 D·4 T·523.359µs -15:56:23.801745 table@remove removed @37 -15:56:23.801794 table@remove removed @34 -15:56:23.801894 table@remove removed @31 -15:56:23.801997 table@remove removed @28 -15:56:23.802077 table@remove removed @27 
-15:56:23.802676 db@close closing -15:56:23.802806 db@close done T·127.71µs -=============== Jul 4, 2017 (WEST) =============== -15:56:40.131595 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:56:40.131793 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -15:56:40.131804 db@open opening -15:56:40.131847 journal@recovery F·1 -15:56:40.132170 journal@recovery recovering @38 -15:56:40.134004 memdb@flush created L0@41 N·1 S·162B "/lo..oot,v34":"/lo..oot,v34" -15:56:40.134473 version@stat F·[1 1] S·1KiB[162B 1KiB] Sc·[0.25 0.00] -15:56:40.135186 db@janitor F·4 G·0 -15:56:40.135204 db@open done T·3.393383ms -15:56:40.142721 db@close closing -15:56:40.142910 db@close done T·182.273µs -=============== Jul 4, 2017 (WEST) =============== -15:56:55.053800 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -15:56:55.053959 version@stat F·[1 1] S·1KiB[162B 1KiB] Sc·[0.25 0.00] -15:56:55.053971 db@open opening -15:56:55.054018 journal@recovery F·1 -15:56:55.054340 journal@recovery recovering @42 -15:56:55.056275 memdb@flush created L0@44 N·1 S·162B "/lo..oot,v36":"/lo..oot,v36" -15:56:55.056865 version@stat F·[2 1] S·1KiB[324B 1KiB] Sc·[0.50 0.00] -15:56:55.057913 db@janitor F·5 G·0 -15:56:55.057935 db@open done T·3.957331ms -15:56:55.067574 db@close closing -15:56:55.067860 db@close done T·277.099µs -=============== Jul 4, 2017 (WEST) =============== -16:00:39.662902 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -16:00:39.664111 version@stat F·[2 1] S·1KiB[324B 1KiB] Sc·[0.50 0.00] -16:00:39.664131 db@open opening -16:00:39.664211 journal@recovery F·1 -16:00:39.664643 journal@recovery recovering @45 -16:00:39.665526 memdb@flush created L0@47 N·1 S·162B "/lo..oot,v38":"/lo..oot,v38" -16:00:39.666652 version@stat F·[3 1] S·1KiB[486B 1KiB] Sc·[0.75 0.00] -16:00:39.671269 db@janitor F·6 G·0 -16:00:39.671306 db@open done T·7.162312ms -16:00:39.672402 db@close closing -16:00:39.672694 db@close done T·288.456µs -=============== Jul 4, 2017 (WEST) =============== -16:01:03.403625 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed -16:01:03.403841 version@stat F·[3 1] S·1KiB[486B 1KiB] Sc·[0.75 0.00] -16:01:03.403853 db@open opening -16:01:03.403895 journal@recovery F·1 -16:01:03.404257 journal@recovery recovering @48 -16:01:03.406212 memdb@flush created L0@50 N·1 S·162B "/lo..oot,v40":"/lo..oot,v40" -16:01:03.406706 version@stat F·[4 1] S·1KiB[648B 1KiB] Sc·[1.00 0.00] -16:01:03.407485 db@janitor F·7 G·0 -16:01:03.407505 db@open done T·3.645072ms -16:01:03.407599 table@compaction L0·4 -> L1·1 S·1KiB Q·41 -16:01:03.408141 table@build created L1@53 N·4 S·1KiB "/F5..7JP,v5":"/lo..ins,v21" -16:01:03.408173 version@stat F·[0 1] S·1KiB[0B 1KiB] Sc·[0.00 0.00] -16:01:03.408402 table@compaction committed F-4 S-648B Ke·0 D·4 T·777.962µs -16:01:03.408479 table@remove removed @50 -16:01:03.408534 table@remove removed @47 -16:01:03.410564 table@remove removed @44 -16:01:03.411741 table@remove removed @41 -16:01:03.412602 table@remove removed @40 diff --git a/test/test-repo/datastore/MANIFEST-000052 b/test/test-repo/datastore/MANIFEST-000052 deleted file mode 100644 index aa43ddef7b07c77989b45de98ce9be14b95d36c9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 592 zcmbOQ6?zE@eX$~ 
zb@dDgGK&feF$oEZ@C)_|i*gJw3U_rf_X=QSh1equ4f=x2ykbUCh*;IC_$&PkjLc@(
dVg~9AHU=X$27NXL9X19{HbxD0MpIh53IOx5l%xOv

diff --git a/test/test-repo/datastore_spec b/test/test-repo/datastore_spec
deleted file mode 100644
index 7bf9626c..00000000
--- a/test/test-repo/datastore_spec
+++ /dev/null
@@ -1 +0,0 @@
-{"mounts":[{"mountpoint":"/blocks","path":"blocks","shardFunc":"/repo/flatfs/shard/v1/next-to-last/2","type":"flatfs"},{"mountpoint":"/","path":"datastore","type":"levelds"}],"type":"mount"}
\ No newline at end of file
diff --git a/test/test-repo/version b/test/test-repo/version
deleted file mode 100644
index 7f8f011e..00000000
--- a/test/test-repo/version
+++ /dev/null
@@ -1 +0,0 @@
-7
diff --git a/tsconfig.json b/tsconfig.json
index 5681ae6f..ee88f58d 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -9,7 +9,6 @@
   },
   "include": [
     "types",
-    "test",
     "src"
   ]
 }
diff --git a/types/merge-options/index.d.ts b/types/merge-options/index.d.ts
index bb010deb..e5c2d769 100644
--- a/types/merge-options/index.d.ts
+++ b/types/merge-options/index.d.ts
@@ -1,2 +1,2 @@
-declare function mergeOptions<T1, T2> (arg1: T1, arg: T2): T1 & T2
+declare function mergeOptions<T1, T2> (arg1: T1, arg: T2): Required<T1 & T2>
 export = mergeOptions

From fb633e7dc87ed50894d509b691b8c1837f310c62 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Tue, 29 Jun 2021 13:01:43 +0100
Subject: [PATCH 14/25] chore: export ipfsrepo interface type

---
 src/index.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/index.js b/src/index.js
index c016649e..909a8915 100644
--- a/src/index.js
+++ b/src/index.js
@@ -38,6 +38,7 @@ const AUTO_MIGRATE_CONFIG_KEY = 'repoAutoMigrate'
  * @typedef {import('interface-datastore').Datastore} Datastore
  * @typedef {import('interface-blockstore').Blockstore} Blockstore
  * @typedef {import('./types').Backends} Backends
+ * @typedef {import('./types').IPFSRepo} IPFSRepo
  */
 
 /**

From 78e167f5eaf0c5242f5f9b9587e9002154f7b274 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Tue, 29 Jun 2021 13:12:12 +0100
Subject: [PATCH 15/25] chore: export pin types

---
 src/gc.js                | 4 ++--
 src/index.js             | 5 +++--
 src/pinned-blockstore.js | 5 +++--
 src/pins.js              | 7 ++++---
 4 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/src/gc.js b/src/gc.js
index 727f1ed0..41a16a8b 100644
--- a/src/gc.js
+++ b/src/gc.js
@@ -28,7 +28,7 @@ const MFS_ROOT_KEY = new Key('/local/filesroot')
  *
  * @param {object} config
  * @param {import('./types').GCLock} config.gcLock
- * @param {import('./pins')} config.pins
+ * @param {import('./pins').Pins} config.pins
  * @param {Blockstore} config.blockstore
  * @param {import('interface-datastore').Datastore} config.root
  * @param {loadCodec} config.loadCodec
@@ -62,7 +62,7 @@ module.exports = ({ gcLock, pins, blockstore, root, loadCodec }) => {
  * Get Set of CIDs of blocks to keep
  *
  * @param {object} config
- * @param {import('./pins')} config.pins
+ * @param {import('./pins').Pins} config.pins
  * @param {import('interface-blockstore').Blockstore} config.blockstore
  * @param {import('interface-datastore').Datastore} config.root
  * @param {loadCodec} config.loadCodec
diff --git a/src/index.js b/src/index.js
index 909a8915..9b7db732 100644
--- a/src/index.js
+++ b/src/index.js
@@ -15,7 +15,7 @@ const createIdstore = require('./idstore')
 const defaultOptions = require('./default-options')
 const defaultDatastore = require('./default-datastore')
 const ERRORS = require('./errors')
-const Pins = require('./pins')
+const { Pins, PinTypes } = require('./pins')
 const createPinnedBlockstore = require('./pinned-blockstore')
 // @ts-ignore - no
types const mortice = require('mortice') @@ -448,7 +448,8 @@ module.exports = { locks: { memory: MemoryLock, fs: FSLock - } + }, + PinTypes } /** diff --git a/src/pinned-blockstore.js b/src/pinned-blockstore.js index e323e360..8ac95c4d 100644 --- a/src/pinned-blockstore.js +++ b/src/pinned-blockstore.js @@ -2,7 +2,7 @@ const map = require('it-map') const errCode = require('err-code') -const Pins = require('./pins') +const { PinTypes } = require('./pins') /** * @typedef {import('interface-datastore').Query} Query @@ -10,6 +10,7 @@ const Pins = require('./pins') * @typedef {import('interface-datastore').Options} DatastoreOptions * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('multiformats/cid').CID} CID + * @typedef {import('./pins').Pins} Pins */ /** @@ -86,7 +87,7 @@ function createPinnedBlockstore (pins, store) { * @param {Pins} pins */ async function ensureNotPinned (cid, pins) { - const { pinned, reason } = await pins.isPinnedWithType(cid, Pins.PinTypes.all) + const { pinned, reason } = await pins.isPinnedWithType(cid, PinTypes.all) if (pinned) { throw errCode(new Error(`pinned: ${reason}`), 'ERR_BLOCK_PINNED') diff --git a/src/pins.js b/src/pins.js index 355632cc..7595c29f 100644 --- a/src/pins.js +++ b/src/pins.js @@ -306,6 +306,7 @@ class Pins { } } -Pins.PinTypes = PinTypes - -module.exports = Pins +module.exports = { + Pins, + PinTypes +} From e3504ec3de6ddf242186e46b361cd805138b68a4 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 30 Jun 2021 12:33:24 +0100 Subject: [PATCH 16/25] chore: fix up types --- src/gc.js | 6 ++++++ src/index.js | 4 ++-- src/pins.js | 24 ++++++++++-------------- src/types.d.ts | 50 ++++++++++++++++++++++++++++++++++++++++---------- 4 files changed, 58 insertions(+), 26 deletions(-) diff --git a/src/gc.js b/src/gc.js index 41a16a8b..a9fad168 100644 --- a/src/gc.js +++ b/src/gc.js @@ -21,6 +21,8 @@ const MFS_ROOT_KEY = new Key('/local/filesroot') /** * @typedef {import('interface-blockstore').Blockstore} Blockstore * @typedef {import('./types').loadCodec} loadCodec + * @typedef {import('./types').GCErrorResult} GCErrorResult + * @typedef {import('./types').GCSuccessResult} GCSuccessResult */ /** @@ -34,6 +36,9 @@ const MFS_ROOT_KEY = new Key('/local/filesroot') * @param {loadCodec} config.loadCodec */ module.exports = ({ gcLock, pins, blockstore, root, loadCodec }) => { + /** + * @returns {AsyncGenerator} + */ async function * gc () { const start = Date.now() log('Creating set of marked blocks') @@ -47,6 +52,7 @@ module.exports = ({ gcLock, pins, blockstore, root, loadCodec }) => { const blockKeys = blockstore.queryKeys({}) // Delete blocks that are not being used + // @ts-ignore ts cannot tell that we filter out null results yield * deleteUnmarkedBlocks({ blockstore }, markedSet, blockKeys) log(`Complete (${Date.now() - start}ms)`) diff --git a/src/index.js b/src/index.js index 9b7db732..2207def6 100644 --- a/src/index.js +++ b/src/index.js @@ -15,7 +15,7 @@ const createIdstore = require('./idstore') const defaultOptions = require('./default-options') const defaultDatastore = require('./default-datastore') const ERRORS = require('./errors') -const { Pins, PinTypes } = require('./pins') +const { PinManager, PinTypes } = require('./pins') const createPinnedBlockstore = require('./pinned-blockstore') // @ts-ignore - no types const mortice = require('mortice') @@ -71,7 +71,7 @@ class Repo { const blockstore = backends.blocks const pinstore = backends.pins - this.pins = new Pins({ pinstore, blockstore, loadCodec }) + 
this.pins = new PinManager({ pinstore, blockstore, loadCodec })
 
     // this blockstore will not delete blocks that have been pinned
     const pinnedBlockstore = createPinnedBlockstore(this.pins, blockstore)
diff --git a/src/pins.js b/src/pins.js
index 7595c29f..9f3535b4 100644
--- a/src/pins.js
+++ b/src/pins.js
@@ -15,7 +15,7 @@ const {
 const walkDag = require('./utils/walk-dag')
 
 /**
- * @typedef {object} Pin
+ * @typedef {object} PinInternal
  * @property {number} depth
  * @property {import('multiformats/cid').CIDVersion} [version]
  * @property {number} [codec]
@@ -26,13 +26,9 @@ const walkDag = require('./utils/walk-dag')
  * @typedef {import('./types').PinType} PinType
  * @typedef {import('./types').PinQueryType} PinQueryType
  * @typedef {import('multiformats/codecs/interface').BlockCodec} BlockCodec
- */
-
-/**
- * @typedef {Object} PinOptions
- * @property {any} [metadata]
- *
+ * @typedef {import('./types').PinOptions} PinOptions
  * @typedef {import('./types').AbortOptions} AbortOptions
+ * @typedef {import('./types').Pins} Pins
  */
 
 /**
@@ -54,7 +50,10 @@ const PinTypes = {
   all: ('all')
 }
 
-class Pins {
+/**
+ * @implements {Pins}
+ */
+class PinManager {
   /**
    * @param {Object} config
    * @param {import('interface-datastore').Datastore} config.pinstore
@@ -73,12 +72,11 @@ class Pins {
   /**
    * @param {CID} cid
    * @param {PinOptions & AbortOptions} [options]
-   * @returns {Promise<void>}
    */
   async pinDirectly (cid, options = {}) {
     await this.blockstore.get(cid, options)
 
-    /** @type {Pin} */
+    /** @type {PinInternal} */
     const pin = {
       depth: 0
     }
@@ -101,7 +99,6 @@ class Pins {
   /**
    * @param {CID} cid
    * @param {AbortOptions} [options]
-   * @returns {Promise<void>}
    */
   unpin (cid, options) {
     return this.pinstore.delete(cidToKey(cid), options)
@@ -110,12 +107,11 @@ class Pins {
   /**
    * @param {CID} cid
    * @param {PinOptions & AbortOptions} [options]
-   * @returns {Promise<void>}
    */
   async pinRecursively (cid, options = {}) {
     await this.fetchCompleteDag(cid, options)
 
-    /** @type {Pin} */
+    /** @type {PinInternal} */
     const pin = {
       depth: Infinity
     }
diff --git a/src/types.d.ts b/src/types.d.ts
index 304a3da2..0b7336f0 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -1,7 +1,7 @@
 import type { Datastore } from 'interface-datastore'
 import type { Blockstore } from 'interface-blockstore'
 
-import type { CID } from 'multiformats'
+import type { CID, CIDVersion } from 'multiformats/cid'
 import type { BlockCodec } from 'multiformats/codecs/interface'
 
 export type AwaitIterable<T> = Iterable<T> | AsyncIterable<T>
@@ -77,13 +77,7 @@ export interface IPFSRepo {
 
   gcLock: GCLock
 
-  gc: () => AsyncGenerator<{
-    err: Error;
-    cid?: undefined;
-  } | {
-    cid: CID;
-    err?: undefined;
-  } | null, void, unknown>
+  gc: () => AsyncGenerator<GCErrorResult | GCSuccessResult>
 
   /**
    * Initialize a new repo.
@@ -138,6 +132,10 @@ export interface Backends {
   pins: Datastore
 }
 
+export interface LockCloser {
+  close: () => Promise<void>
+}
+
 export interface RepoLock {
   /**
   * Sets the lock if one does not already exist. If a lock already exists, should throw an error. 
@@ -157,8 +155,14 @@ export interface GCLock {
   writeLock: () => Promise<ReleaseLock>
 }
 
-export interface LockCloser {
-  close: () => Promise<void>
+export interface GCErrorResult {
+  err: Error
+  cid?: undefined
+}
+
+export interface GCSuccessResult {
+  cid: CID
+  err?: undefined
 }
 
 export interface Stat {
@@ -286,6 +290,32 @@ export type PinType = 'recursive' | 'direct' | 'indirect' | 'all'
 
 export type PinQueryType = 'recursive' | 'direct' | 'indirect' | 'all'
 
+export interface PinOptions extends AbortOptions {
+  metadata?: Record<string, any>
+}
+
+export interface Pin {
+  cid: CID
+  metadata?: Record<string, any>
+}
+
+export interface PinnedWithTypeResult {
+  cid: CID
+  pinned: boolean
+  reason?: PinType
+  metadata?: Record<string, any>
+}
+
+export interface Pins {
+  pinDirectly: (cid: CID, options?: PinOptions) => Promise<void>
+  pinRecursively: (cid: CID, options?: PinOptions) => Promise<void>
+  unpin: (cid: CID, options?: AbortOptions) => Promise<void>
+  directKeys: (options?: AbortOptions) => AsyncGenerator<Pin>
+  recursiveKeys: (options?: AbortOptions) => AsyncGenerator<Pin>
+  indirectKeys: (options?: AbortOptions) => AsyncGenerator<CID>
+  isPinnedWithType: (cid: CID, types: PinQueryType|PinQueryType[], options?: AbortOptions) => Promise<PinnedWithTypeResult>
+}
+
 export interface AbortOptions {
   signal?: AbortSignal
 }

From 439eaa2a1be24c77055a4281e2b8135bcacaa789 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 1 Jul 2021 10:56:47 +0100
Subject: [PATCH 17/25] chore: fix types, add type tests and better pin tests

---
 package.json            |  11 +-
 src/types.d.ts          | 101 +++++++++--------
 src/utils/walk-dag.js   |   4 +-
 test/lock-test.js       |   4 +
 test/migrations-test.js |   3 +
 test/node.js            |   3 +
 test/pins-test.js       | 245 +++++++++++++++++++++++++++++++++-------
 test/repo-test.js       |   6 +-
 test/types.test-d.ts    |  18 +++
 tsconfig.json           |   3 +-
 10 files changed, 301 insertions(+), 97 deletions(-)
 create mode 100644 test/types.test-d.ts

diff --git a/package.json b/package.json
index e101e651..f64510c6 100644
--- a/package.json
+++ b/package.json
@@ -5,6 +5,9 @@
   "leadMaintainer": "Alex Potsides ",
   "main": "src/index.js",
   "types": "dist/src/index.d.ts",
+  "tsd": {
+    "directory": "test"
+  },
   "files": [
     "src",
     "dist"
@@ -17,7 +20,7 @@
   },
   "scripts": {
     "prepare": "aegir build --no-bundle",
-    "test": "aegir test",
+    "test": "tsd && aegir test",
     "test:node": "aegir test -t node",
     "test:browser": "aegir test -t browser",
     "test:webworker": "aegir test -t webworker",
@@ -45,8 +48,6 @@
     "npm": ">=6.0.0"
   },
   "devDependencies": {
-    "@ipld/dag-cbor": "^6.0.4",
-    "@ipld/dag-pb": "^2.1.0",
     "@types/bytes": "^3.1.0",
     "@types/debug": "^4.1.5",
     "@types/proper-lockfile": "^4.1.1",
@@ -62,14 +63,18 @@
     "just-range": "^2.1.0",
     "rimraf": "^3.0.0",
     "sinon": "^11.1.1",
+    "tsd": "^0.17.0",
     "url": "^0.11.0",
     "util": "^0.12.3"
   },
   "dependencies": {
+    "@ipld/dag-cbor": "^6.0.4",
+    "@ipld/dag-pb": "^2.1.0",
     "bytes": "^3.1.0",
     "cborg": "^1.3.4",
     "debug": "^4.1.0",
     "err-code": "^3.0.1",
+    "eslint-plugin-ava": "^12.0.0",
     "interface-blockstore": "^0.1.0",
     "interface-datastore": "^5.0.0",
     "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats",
diff --git a/src/types.d.ts b/src/types.d.ts
index 0b7336f0..b78796f4 100644
--- a/src/types.d.ts
+++ b/src/types.d.ts
@@ -1,7 +1,7 @@
 import type { Datastore } from 'interface-datastore'
 import type { Blockstore } from 'interface-blockstore'
 
-import type { CID, CIDVersion } from 'multiformats/cid'
+import type { CID } from 'multiformats/cid'
 import type { BlockCodec } from 'multiformats/codecs/interface'
 
 export type AwaitIterable<T> = Iterable<T> | AsyncIterable<T>
@@ -32,47 +32,47 @@ export 
interface Options {
 }
 
 export interface IPFSRepo {
-  closed: boolean;
-  path: string;
-  root: Datastore;
-  datastore: Datastore;
-  keys: Datastore;
-  pins: Pins;
-  blocks: Blockstore;
+  closed: boolean
+  path: string
+  root: Datastore
+  datastore: Datastore
+  keys: Datastore
+  pins: Pins
+  blocks: Blockstore
 
   version: {
-    exists(): Promise<boolean>;
-    get(): Promise<number>;
-    set(version: number): Promise<void>;
-    check(expected: number): Promise<boolean>;
+    exists: () => Promise<boolean>
+    get: () => Promise<number>
+    set: (version: number) => Promise<void>
+    check: (expected: number) => Promise<boolean>
   }
 
   config: {
-    getAll(options?: {
-      signal?: AbortSignal | undefined;
-    } | undefined): Promise<Config>;
-    get(key: string, options?: {
-      signal?: AbortSignal | undefined;
-    } | undefined): Promise<any>;
-    set(key: string, value?: any, options?: {
-      signal?: AbortSignal | undefined;
-    } | undefined): Promise<void>;
-    replace(value?: import("./types").Config | undefined, options?: {
-      signal?: AbortSignal | undefined;
-    } | undefined): Promise<void>;
-    exists(): Promise<boolean>;
+    getAll: (options?: {
+      signal?: AbortSignal
+    }) => Promise<Config>
+    get: (key: string, options?: {
+      signal?: AbortSignal
+    }) => Promise<any>
+    set: (key: string, value?: any, options?: {
+      signal?: AbortSignal
+    }) => Promise<void>
+    replace: (value?: import('./types').Config, options?: {
+      signal?: AbortSignal
+    }) => Promise<void>
+    exists: () => Promise<boolean>
   }
 
   spec: {
-    exists(): Promise<boolean>;
-    get(): Promise<any>;
-    set(spec: any): Promise<void>;
+    exists: () => Promise<boolean>
+    get: () => Promise<any>
+    set: (spec: any) => Promise<void>
   }
 
   apiAddr: {
-    get(): Promise<string>;
-    set(value: string): Promise<void>;
-    delete(): Promise<void>;
+    get: () => Promise<string>
+    set: (value: string) => Promise<void>
+    delete: () => Promise<void>
   }
 
   gcLock: GCLock
@@ -85,14 +85,14 @@ export interface IPFSRepo {
    * @param {import('./types').Config} config - config to write into `config`.
    * @returns {Promise<void>}
    */
-  init(config: Config): Promise<void>
+  init: (config: Config) => Promise<void>
 
   /**
    * Check if the repo is already initialized.
   *
   * @returns {Promise<boolean>}
   */
-  isInitialized(): Promise<boolean>
+  isInitialized: () => Promise<boolean>
 
   /**
   * Open the repo. If the repo is already open an error will be thrown.
   *
   * @returns {Promise<void>}
   */
-  open(): Promise<void>
+  open: () => Promise<void>
 
   /**
   * Close the repo and cleanup.
   *
   * @returns {Promise<void>}
   */
-  close(): Promise<void>
+  close: () => Promise<void>
 
   /**
   * Check if a repo exists.
   *
   * @returns {Promise<boolean>}
   */
-  exists(): Promise<boolean>
+  exists: () => Promise<boolean>
 
   /**
   * Get repo status. 
   *
   * @returns {Promise<Stat>}
   */
-  stat(): Promise<Stat>
+  stat: () => Promise<Stat>
 }
 
 export interface Backends {
@@ -148,7 +148,7 @@ export interface RepoLock {
   locked: (path: string) => Promise<boolean>
 }
 
-export type ReleaseLock = () => void
+export interface ReleaseLock { (): void }
 
 export interface GCLock {
   readLock: () => Promise<ReleaseLock>
@@ -175,7 +175,7 @@ export interface Stat {
 
 export interface Config {
   Addresses?: AddressConfig
-  API?: APIConfig,
+  API?: APIConfig
   Profiles?: string
   Bootstrap?: string[]
   Discovery?: DiscoveryConfig
@@ -195,8 +195,8 @@ export interface AddressConfig {
   RPC?: string
   Delegates?: string[]
   Gateway?: string
-  Swarm?: string[],
-  Announce?: string[],
+  Swarm?: string[]
+  Announce?: string[]
   NoAnnounce?: string[]
 }
@@ -223,22 +223,22 @@ export interface DatastoreConfig {
 }
 
 export interface DatastoreType {
-  type: string,
-  path: string,
-  sync?: boolean,
-  shardFunc?: string,
+  type: string
+  path: string
+  sync?: boolean
+  shardFunc?: string
   compression?: string
 }
 
 export interface DatastoreMountPoint {
-  mountpoint: string,
-  type: string,
-  prefix: string,
+  mountpoint: string
+  type: string
+  prefix: string
   child: DatastoreType
 }
 
 export interface DatastoreSpec {
-  type?: string,
+  type?: string
   mounts?: DatastoreMountPoint[]
 }
@@ -304,6 +304,7 @@ export interface PinnedWithTypeResult {
   pinned: boolean
   reason?: PinType
   metadata?: Record<string, any>
+  parent?: CID
 }
 
 export interface Pins {
@@ -320,4 +321,4 @@ export interface AbortOptions {
   signal?: AbortSignal
 }
 
-export type loadCodec = (codeOrName: number | string) => Promise<BlockCodec<any, any>>
+export interface loadCodec { (codeOrName: number | string): Promise<BlockCodec<any, any>> }
diff --git a/src/utils/walk-dag.js b/src/utils/walk-dag.js
index c472ce3e..f2073f5a 100644
--- a/src/utils/walk-dag.js
+++ b/src/utils/walk-dag.js
@@ -64,14 +64,14 @@ function * dagCborLinks (obj, path = [], parseBuffer = true) {
         const __path = _path.slice()
         __path.push(i.toString())
         const o = val[i]
-        if (CID.isCID(o)) { // eslint-disable-line max-depth
+        if (o instanceof CID) { // eslint-disable-line max-depth
           yield [__path.join('/'), o]
         } else if (typeof o === 'object') {
           yield * dagCborLinks(o, _path, false)
         }
       }
     } else {
-      if (CID.isCID(val)) {
+      if (val instanceof CID) {
         yield [_path.join('/'), val]
       } else {
         yield * dagCborLinks(val, _path, false)
       }
     }
diff --git a/test/lock-test.js b/test/lock-test.js
index 2259d1fb..6220564d 100644
--- a/test/lock-test.js
+++ b/test/lock-test.js
@@ -13,6 +13,7 @@ const loadCodec = require('./fixtures/load-codec')
 module.exports = (repo) => {
   describe('Repo lock tests', () => {
     it('should handle locking for a repo lifecycle', async () => {
+      // @ts-expect-error lockfile is not part of the interface
       expect(repo.lockfile).to.not.equal(null)
       await repo.close()
       await repo.open()
@@ -20,11 +21,14 @@ module.exports = (repo) => {
 
     it('should prevent multiple repos from using the same path', async () => {
       const repoClone = createRepo(repo.path, loadCodec, {
+        // @ts-expect-error blockstore is not part of the interface
         blocks: repo.pins.blockstore,
         datastore: repo.datastore,
         root: repo.root,
         keys: repo.keys,
+        // @ts-expect-error pinstore is not part of the interface
         pins: repo.pins.pinstore
+        // @ts-expect-error options is not part of the interface
       }, repo.options)
       try {
         await repoClone.init({})
diff --git a/test/migrations-test.js b/test/migrations-test.js
index e3ba86c8..2ee3c4f4 100644
--- a/test/migrations-test.js
+++ b/test/migrations-test.js
@@ -78,11 +78,14 @@ module.exports = (createTempRepo) => {
       // @ts-expect-error options is a private field
       const newOpts = 
Object.assign({}, repo.options) newOpts.autoMigrate = option + // @ts-expect-error loadCodec is a private field const newRepo = createRepo(repo.path, repo.pins.loadCodec, { + // @ts-expect-error blockstore is a private field blocks: repo.pins.blockstore, datastore: repo.datastore, root: repo.root, keys: repo.keys, + // @ts-expect-error pinstore is a private field pins: repo.pins.pinstore }, newOpts) diff --git a/test/node.js b/test/node.js index 6098620c..12d4fc8a 100644 --- a/test/node.js +++ b/test/node.js @@ -40,6 +40,9 @@ describe('IPFS Repo Tests onNode.js', () => { { name: 'memory', opts: { + autoMigrate: true, + onMigrationProgress: () => {}, + repoOwner: true, repoLock: MemoryLock } } diff --git a/test/pins-test.js b/test/pins-test.js index bb9a207f..edf4f46e 100644 --- a/test/pins-test.js +++ b/test/pins-test.js @@ -3,73 +3,238 @@ 'use strict' const { expect } = require('aegir/utils/chai') -const range = require('just-range') -const Key = require('interface-datastore').Key +const dagPb = require('@ipld/dag-pb') +const dagCbor = require('@ipld/dag-cbor') +const { sha256 } = require('multiformats/hashes/sha2') +const { CID } = require('multiformats/cid') +const all = require('it-all') +const { + PinTypes +} = require('../src/pins') const uint8ArrayFromString = require('uint8arrays/from-string') +/** + * @param {import('@ipld/dag-pb').PBNode} node + */ +async function createDagPbNode (node = { Data: uint8ArrayFromString(`data-${Math.random()}`), Links: [] }) { + const buf = dagPb.encode(node) + const hash = await sha256.digest(buf) + const cid = CID.createV0(hash) + + return { + cid, + buf, + node + } +} + +/** + * @param {any} node + */ +async function createDagCborNode (node = { Data: uint8ArrayFromString(`data-${Math.random()}`), Links: [] }) { + const buf = dagCbor.encode(node) + const hash = await sha256.digest(buf) + const cid = CID.createV1(dagCbor.code, hash) + + return { + cid, + buf, + node + } +} + /** * @param {import('../src/types').IPFSRepo} repo */ module.exports = (repo) => { describe('pins', () => { - const dataList = range(100).map((i) => uint8ArrayFromString(`hello-${i}-${Math.random()}`)) - const data = uint8ArrayFromString('hello world') - const b = new Key('hello') - it('exists', () => { expect(repo).to.have.property('pins') }) - describe('.put', () => { - it('simple', async () => { - await repo.pins.pinstore.put(b, data) + describe('.pinDirectly', () => { + it('pins a block directly', async () => { + const { cid, buf } = await createDagPbNode() + + await repo.blocks.put(cid, buf) + await repo.pins.pinDirectly(cid) + + const pins = await all(repo.pins.directKeys()) + + expect(pins.map(p => p.cid.toString())).to.include(cid.toString()) + }) + + it('pins a block directly with metadata', async () => { + const { cid, buf } = await createDagPbNode() + + const metadata = { + foo: 'bar' + } + + await repo.blocks.put(cid, buf) + await repo.pins.pinDirectly(cid, { metadata }) + + const pins = await all(repo.pins.directKeys()) + + expect(pins.filter(p => p.cid.toString() === cid.toString())) + .to.have.deep.nested.property('[0].metadata', metadata) }) + }) + + describe('.pinRecursively', () => { + it('pins a block recursively', async () => { + const { cid, buf } = await createDagPbNode() + + await repo.blocks.put(cid, buf) + await repo.pins.pinRecursively(cid) - it('multi write (locks)', async () => { - await Promise.all([repo.pins.pinstore.put(b, data), repo.pins.pinstore.put(b, data)]) + const pins = await all(repo.pins.recursiveKeys()) + + expect(pins.map(p => 
p.cid.toString())).to.include(cid.toString()) }) - it('massive multiwrite', async function () { - this.timeout(15000) // add time for ci - await Promise.all(range(100).map((i) => { - return repo.pins.pinstore.put(new Key('hello' + i), dataList[i]) - })) + it('pins a block recursively with metadata', async () => { + const { cid, buf } = await createDagPbNode() + + const metadata = { + foo: 'bar' + } + + await repo.blocks.put(cid, buf) + await repo.pins.pinRecursively(cid, { metadata }) + + const pins = await all(repo.pins.recursiveKeys()) + + expect(pins.filter(p => p.cid.toString() === cid.toString())) + .to.have.deep.nested.property('[0].metadata', metadata) }) }) - describe('.get', () => { - it('simple', async () => { - const val = await repo.pins.pinstore.get(b) - expect(val).to.be.eql(data) + describe('.unpin', () => { + it('unpins directly', async () => { + const { cid, buf } = await createDagPbNode() + + await repo.blocks.put(cid, buf) + await repo.pins.pinDirectly(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', true) + + await repo.pins.unpin(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', false) }) - it('massive read', async function () { - this.timeout(15000) // add time for ci - await Promise.all(range(20 * 100).map(async (i) => { - const j = i % dataList.length - const val = await repo.pins.pinstore.get(new Key('hello' + j)) - expect(val).to.be.eql(dataList[j]) - })) - }).timeout(10 * 1000) + it('unpins recursively', async () => { + const { cid, buf } = await createDagPbNode() + + await repo.blocks.put(cid, buf) + await repo.pins.pinRecursively(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', true) + + await repo.pins.unpin(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + }) }) - describe('.has', () => { - it('existing pin', async () => { - const exists = await repo.pins.pinstore.has(b) - expect(exists).to.eql(true) + describe('.indirectKeys', () => { + it('lists indirectly pinned keys', async () => { + const child = await createDagPbNode() + const parent = await createDagPbNode({ + Links: [{ + Name: 'child', + Tsize: child.buf.length, + Hash: child.cid + }] + }) + + await repo.blocks.put(child.cid, child.buf) + await repo.blocks.put(parent.cid, parent.buf) + await repo.pins.pinRecursively(parent.cid) + + const pins = await all(repo.pins.indirectKeys()) + + expect(pins.map(c => c.toString())).to.include(child.cid.toString()) }) - it('non existent pin', async () => { - const exists = await repo.pins.pinstore.has(new Key('world')) - expect(exists).to.eql(false) + it('lists indirectly pinned cbor keys', async () => { + const child = await createDagCborNode() + const parent = await createDagCborNode({ + child: { '/': child.cid } + }) + + await repo.blocks.put(child.cid, child.buf) + await repo.blocks.put(parent.cid, parent.buf) + await repo.pins.pinRecursively(parent.cid) + + const pins = await all(repo.pins.indirectKeys()) + + expect(pins.map(c => c.toString())).to.include(child.cid.toString()) }) }) - describe('.delete', () => { - it('simple', async () => { - await repo.pins.pinstore.delete(b) - const exists = await repo.pins.pinstore.has(b) - expect(exists).to.equal(false) + describe('.isPinnedWithType', () => { + it('reports directly pinned blocks', async () => { + const { cid, buf } = await createDagPbNode() + + await 
repo.blocks.put(cid, buf) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.indirect)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.all)).to.eventually.have.property('pinned', false) + + await repo.pins.pinDirectly(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', true) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.indirect)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.all)).to.eventually.have.property('pinned', true) + }) + + it('reports recursively pinned blocks', async () => { + const { cid, buf } = await createDagPbNode() + + await repo.blocks.put(cid, buf) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.indirect)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.all)).to.eventually.have.property('pinned', false) + + await repo.pins.pinRecursively(cid) + + await expect(repo.pins.isPinnedWithType(cid, PinTypes.direct)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.recursive)).to.eventually.have.property('pinned', true) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.indirect)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(cid, PinTypes.all)).to.eventually.have.property('pinned', true) + }) + + it('reports indirectly pinned blocks', async () => { + const child = await createDagPbNode() + const parent = await createDagPbNode({ + Links: [{ + Name: 'child', + Tsize: child.buf.length, + Hash: child.cid + }] + }) + + await repo.blocks.put(child.cid, child.buf) + await repo.blocks.put(parent.cid, parent.buf) + + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.direct)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.indirect)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.all)).to.eventually.have.property('pinned', false) + + await repo.pins.pinRecursively(parent.cid) + + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.direct)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.recursive)).to.eventually.have.property('pinned', false) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.indirect)).to.eventually.have.property('pinned', true) + await expect(repo.pins.isPinnedWithType(child.cid, PinTypes.all)).to.eventually.have.property('pinned', true) }) }) }) diff --git a/test/repo-test.js b/test/repo-test.js index e3aa979b..59135104 100644 --- a/test/repo-test.js +++ b/test/repo-test.js @@ -14,7 +14,11 @@ const MemoryLock = require('../src/locks/memory') const createBackend 
= require('./fixtures/create-backend')
 
 /**
- * @param {import('../src/types').IPFSRepo} repo
+ * @typedef {import('../src/types').IPFSRepo} IPFSRepo
+ */
+
+/**
+ * @param {IPFSRepo} repo
  */
 module.exports = (repo) => {
   describe('IPFS Repo Tests', () => {
diff --git a/test/types.test-d.ts b/test/types.test-d.ts
new file mode 100644
index 00000000..d1edb342
--- /dev/null
+++ b/test/types.test-d.ts
@@ -0,0 +1,18 @@
+import { expectType } from 'tsd'
+import type { IPFSRepo } from '../'
+import { createRepo } from '../'
+import { MemoryDatastore } from 'interface-datastore'
+import { MemoryBlockstore } from 'interface-blockstore'
+
+expectType<IPFSRepo>(createRepo('', async () => ({
+  name: '',
+  code: 0,
+  encode: () => new Uint8Array(),
+  decode: () => {}
+}), {
+  root: new MemoryDatastore(),
+  blocks: new MemoryBlockstore(),
+  keys: new MemoryDatastore(),
+  pins: new MemoryDatastore(),
+  datastore: new MemoryDatastore()
+}))
diff --git a/tsconfig.json b/tsconfig.json
index ee88f58d..30f8bc4a 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -9,6 +9,7 @@
   },
   "include": [
     "types",
-    "src"
+    "src",
+    "test"
   ]
 }

From 627f0d3bb6bed333d2990d5d3fcb834d8e86432d Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 1 Jul 2021 11:17:13 +0100
Subject: [PATCH 18/25] chore: deps

---
 package.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/package.json b/package.json
index f64510c6..4652e133 100644
--- a/package.json
+++ b/package.json
@@ -54,7 +54,7 @@
     "@types/rimraf": "^3.0.0",
     "aegir": "^33.1.0",
     "assert": "^2.0.0",
-    "blockstore-datastore-adapter": "0.0.3",
+    "blockstore-datastore-adapter": "0.0.4",
     "events": "^3.3.0",
     "ipfs-utils": "^8.1.3",
     "it-all": "^1.0.2",
@@ -75,7 +75,7 @@
     "debug": "^4.1.0",
     "err-code": "^3.0.1",
     "eslint-plugin-ava": "^12.0.0",
-    "interface-blockstore": "^0.1.0",
+    "interface-blockstore": "^0.2.1",
     "interface-datastore": "^5.0.0",
     "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats",
     "it-filter": "^1.0.2",

From 5668fe7dfea1c489d24d7dc416b40068c4cb005b Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 1 Jul 2021 12:38:41 +0100
Subject: [PATCH 19/25] chore: remove buffer reference

---
 src/utils/walk-dag.js | 2 +-
 test/pins-test.js     | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/utils/walk-dag.js b/src/utils/walk-dag.js
index f2073f5a..41f0ab48 100644
--- a/src/utils/walk-dag.js
+++ b/src/utils/walk-dag.js
@@ -49,7 +49,7 @@ async function * walkDag (cid, blockstore, loadCodec, options) {
  * @returns {Generator<[string, CID], void, undefined>}
  */
 function * dagCborLinks (obj, path = [], parseBuffer = true) {
-  if (parseBuffer && Buffer.isBuffer(obj)) {
+  if (parseBuffer && obj instanceof Uint8Array) {
     obj = cborg.decode(obj)
   }
 
diff --git a/test/pins-test.js b/test/pins-test.js
index edf4f46e..703b561e 100644
--- a/test/pins-test.js
+++ b/test/pins-test.js
@@ -159,9 +159,12 @@ module.exports = (repo) => {
     })
 
     it('lists indirectly pinned cbor keys', async () => {
-      const child = await createDagCborNode()
+      const child = await createDagCborNode({
+        data: Math.random()
+      })
       const parent = await createDagCborNode({
-        child: { '/': child.cid }
+        child: child.cid,
+        data: Math.random()
       })
 
       await repo.blocks.put(child.cid, child.buf)

From 30f0639ab77f72244f1cbeac62b2d1ab34ed416a Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Mon, 5 Jul 2021 13:29:07 +0100
Subject: [PATCH 20/25] chore: throw error if walking dag fails

---
 src/utils/walk-dag.js | 2 ++
 1 file changed, 2 insertions(+)

diff --git
a/src/utils/walk-dag.js b/src/utils/walk-dag.js index 41f0ab48..4cc06360 100644 --- a/src/utils/walk-dag.js +++ b/src/utils/walk-dag.js @@ -38,6 +38,8 @@ async function * walkDag (cid, blockstore, loadCodec, options) { } } catch (err) { log('Could not walk DAG for CID', cid.toString(), err) + + throw err } } From d094feab252155025d0fbde14ed0d0d5930311bd Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 7 Jul 2021 18:01:06 +0100 Subject: [PATCH 21/25] chore: remove gh dep versions --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 4652e133..8f5ab077 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,7 @@ "@types/rimraf": "^3.0.0", "aegir": "^33.1.0", "assert": "^2.0.0", - "blockstore-datastore-adapter": "0.0.4", + "blockstore-datastore-adapter": "^1.0.0", "events": "^3.3.0", "ipfs-utils": "^8.1.3", "it-all": "^1.0.2", @@ -75,9 +75,9 @@ "debug": "^4.1.0", "err-code": "^3.0.1", "eslint-plugin-ava": "^12.0.0", - "interface-blockstore": "^0.2.1", + "interface-blockstore": "^1.0.0", "interface-datastore": "^5.0.0", - "ipfs-repo-migrations": "ipfs/js-ipfs-repo-migrations#chore/upgrade-multiformats", + "ipfs-repo-migrations": "^9.0.0", "it-filter": "^1.0.2", "it-map": "^1.0.5", "it-merge": "^1.0.2", From 526bf778078a17acb5ab78bce67fcd4e15ddf490 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 9 Jul 2021 12:53:22 +0100 Subject: [PATCH 22/25] chore: upgrade aegir --- .github/workflows/main.yml | 2 +- package.json | 2 +- test/blockstore-test.js | 12 ++++-------- test/datastore-test.js | 8 +++----- 4 files changed, 9 insertions(+), 15 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 398e9884..5b9129af 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -61,7 +61,7 @@ jobs: - uses: actions/checkout@v2 - uses: microsoft/playwright-github-action@v1 - run: npm install - - run: npx aegir test -t browser -t webworker --bail --timeout 10000 -- --browser webkit + - run: npx aegir test -t browser -t webworker --bail -- --browser webkit # test-electron-main: # needs: check # runs-on: ubuntu-latest diff --git a/package.json b/package.json index 8f5ab077..a42e710e 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "@types/debug": "^4.1.5", "@types/proper-lockfile": "^4.1.1", "@types/rimraf": "^3.0.0", - "aegir": "^33.1.0", + "aegir": "^34.0.0", "assert": "^2.0.0", "blockstore-datastore-adapter": "^1.0.0", "events": "^3.3.0", diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 41510e7c..df97515b 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -90,16 +90,14 @@ module.exports = (repo) => { await repo.blocks.put(CID.createV0(digest), d) }) - it('massive multiwrite', async function () { - this.timeout(15000) // add time for ci + it('massive multiwrite', async () => { const hashes = await Promise.all(range(100).map((i) => sha256.digest(blockData[i]))) await Promise.all(range(100).map((i) => { return repo.blocks.put(CID.createV0(hashes[i]), blockData[i]) })) }) - it('.putMany', async function () { - this.timeout(15000) // add time for ci + it('.putMany', async () => { const blocks = await Promise.all(range(50).map(async (i) => { const d = uint8ArrayFromString('many' + Math.random()) const digest = await sha256.digest(d) @@ -152,8 +150,7 @@ module.exports = (repo) => { expect(block).to.equalBytes(pair.value) }) - it('massive read', async function () { - this.timeout(15000) // add time for ci + it('massive read', async () 
=> { await Promise.all(range(20 * 100).map(async (i) => { const j = i % blockData.length const digest = await sha256.digest(blockData[j]) @@ -264,8 +261,7 @@ module.exports = (repo) => { expect(blocks).to.deep.include(identityData) }) - it('massive read', async function () { - this.timeout(15000) // add time for ci + it('massive read', async () => { const num = 20 * 100 const blocks = await all(repo.blocks.getMany(async function * () { diff --git a/test/datastore-test.js b/test/datastore-test.js index 3ef811cd..3d72fc6f 100644 --- a/test/datastore-test.js +++ b/test/datastore-test.js @@ -25,8 +25,7 @@ module.exports = (repo) => { await Promise.all([repo.datastore.put(b, data), repo.datastore.put(b, data)]) }) - it('massive multiwrite', async function () { - this.timeout(15000) // add time for ci + it('massive multiwrite', async () => { await Promise.all(range(100).map((i) => { return repo.datastore.put(new Key('hello' + i), dataList[i]) })) @@ -39,14 +38,13 @@ module.exports = (repo) => { expect(val).to.be.eql(data) }) - it('massive read', async function () { - this.timeout(15000) // add time for ci + it('massive read', async () => { await Promise.all(range(20 * 100).map(async (i) => { const j = i % dataList.length const val = await repo.datastore.get(new Key('hello' + j)) expect(val).to.be.eql(dataList[j]) })) - }).timeout(10 * 1000) + }) }) describe('.has', () => { From e95a6602b51b7273bed0e2839efceb66242118f8 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 9 Jul 2021 12:55:22 +0100 Subject: [PATCH 23/25] chore: update bundle size --- .aegir.js | 2 +- .github/workflows/main.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.aegir.js b/.aegir.js index 638e5694..23b14d8a 100644 --- a/.aegir.js +++ b/.aegir.js @@ -3,6 +3,6 @@ /** @type {import('aegir').PartialOptions} */ module.exports = { build: { - bundlesizeMax: '93kB' + bundlesizeMax: '47kB' } } diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5b9129af..03c88726 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -61,7 +61,7 @@ jobs: - uses: actions/checkout@v2 - uses: microsoft/playwright-github-action@v1 - run: npm install - - run: npx aegir test -t browser -t webworker --bail -- --browser webkit + - run: npx aegir test -t browser -t webworker --bail -- --browser webkit # test-electron-main: # needs: check # runs-on: ubuntu-latest From 7e94baadffc82de35a10b41ba39eeb57932f3e43 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 9 Jul 2021 12:58:52 +0100 Subject: [PATCH 24/25] chore: tests are much faster now --- test/blockstore-test.js | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/test/blockstore-test.js b/test/blockstore-test.js index df97515b..27c30def 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -167,7 +167,7 @@ module.exports = (repo) => { }) it('should get block stored under v0 CID with a v1 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) await repo.blocks.put(cid, data) @@ -176,7 +176,7 @@ module.exports = (repo) => { }) it('should get block stored under v1 CID with a v0 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagPb.code, digest) @@ -191,7 +191,7 @@ 
module.exports = (repo) => { }) it('throws ERR_NOT_FOUND when requesting non-dag-pb CID that is not in the store', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagPb.code, digest) @@ -200,7 +200,7 @@ module.exports = (repo) => { it('throws unknown error encountered when getting a block', async () => { const err = new Error('wat') - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) @@ -287,7 +287,7 @@ module.exports = (repo) => { }) it('should get block stored under v0 CID with a v1 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) await repo.blocks.put(cid, data) @@ -296,7 +296,7 @@ module.exports = (repo) => { }) it('should get block stored under v1 CID with a v0 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagPb.code, digest) @@ -311,7 +311,7 @@ module.exports = (repo) => { }) it('throws ERR_NOT_FOUND when requesting non-dag-pb CID that is not in the store', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagCbor.code, digest) @@ -320,7 +320,7 @@ module.exports = (repo) => { it('throws unknown error encountered when getting a block', async () => { const err = new Error('wat') - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) @@ -379,7 +379,7 @@ module.exports = (repo) => { }) it('should have block stored under v0 CID with a v1 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV0(digest) await repo.blocks.put(cid, data) @@ -388,7 +388,7 @@ module.exports = (repo) => { }) it('should have block stored under v1 CID with a v0 CID', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagCbor.code, digest) @@ -403,7 +403,7 @@ module.exports = (repo) => { }) it('returns false when requesting non-dag-pb CID that is not in the store', async () => { - const data = uint8ArrayFromString(`TEST${Date.now()}`) + const data = uint8ArrayFromString(`TEST${Math.random()}`) const digest = await sha256.digest(data) const cid = CID.createV1(dagCbor.code, digest) const result = await repo.blocks.has(cid) From d6204f8c4f53a55b0e3dd3ff98d3049361ca846f Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 9 Jul 2021 13:06:29 +0100 Subject: [PATCH 25/25] chore: support batch in idstore --- src/idstore.js | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/idstore.js b/src/idstore.js index 974dea4d..60cd387e 100644 --- a/src/idstore.js +++ b/src/idstore.js @@ -119,10 +119,30 @@ function 
createIdStore (store) { }, batch () { + const batch = store.batch() + return { - put (key, value) { }, - delete (key) { }, - commit: async (options) => { } + put (cid, buf) { + const { isIdentity } = extractContents(cid) + + if (isIdentity) { + return + } + + batch.put(cid, buf) + }, + delete (cid) { + const { isIdentity } = extractContents(cid) + + if (isIdentity) { + return + } + + batch.delete(cid) + }, + commit: (options) => { + return batch.commit(options) + } } } }
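The batch implementation above mirrors the non-batch code paths that use `extractContents` the same way: blocks addressed by an identity multihash are never written, because their bytes are recoverable from the CID itself. A rough sketch of the resulting behaviour — the `repo` instance, and the multiformats v9-style imports, are assumptions:

'use strict'

const { CID } = require('multiformats/cid')
const { sha256 } = require('multiformats/hashes/sha2')
const { identity } = require('multiformats/hashes/identity')
const raw = require('multiformats/codecs/raw')

/**
 * @param {import('./src/types').IPFSRepo} repo - an open repo (assumed)
 */
async function demo (repo) {
  const data = new TextEncoder().encode('hello world')

  const regularCid = CID.createV1(raw.code, await sha256.digest(data))
  const identityCid = CID.createV1(raw.code, identity.digest(data))

  const batch = repo.blocks.batch()
  batch.put(regularCid, data) // queued on the underlying store's batch
  batch.put(identityCid, data) // skipped - the bytes live in the multihash
  await batch.commit()

  // both remain readable: the idstore inlines identity CIDs on get
  console.log(await repo.blocks.get(regularCid))
  console.log(await repo.blocks.get(identityCid))
}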