From 99d741012ef55b8f657d2a9b3255fc7478b90d97 Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Thu, 19 Sep 2019 14:36:30 +0100
Subject: [PATCH] chore: convert internals to promises

---
 .aegir.js | 2 +-
 .travis.yml | 7 +
 examples/traverse-ipld-graphs/create-node.js | 3 +-
 examples/traverse-ipld-graphs/eth.js | 3 +-
 examples/traverse-ipld-graphs/git.js | 3 +-
 package.json | 40 +-
 src/cli/utils.js | 2 +-
 src/core/boot.js | 102 ++---
 src/core/components/bitswap.js | 68 ++-
 src/core/components/block.js | 126 +++---
 src/core/components/bootstrap.js | 7 +-
 src/core/components/config.js | 4 +-
 src/core/components/dag.js | 119 ++----
 src/core/components/dht.js | 77 ++--
 src/core/components/dns.js | 16 +-
 src/core/components/files-mfs.js | 4 +-
 .../components/files-regular/add-from-url.js | 5 +-
 .../files-regular/refs-local-pull-stream.js | 18 +-
 .../files-regular/refs-pull-stream.js | 4 +-
 src/core/components/files-regular/utils.js | 4 +-
 src/core/components/id.js | 14 +-
 src/core/components/init-assets.js | 41 +-
 src/core/components/init.js | 32 +-
 src/core/components/key.js | 47 ++-
 src/core/components/name-pubsub.js | 30 +-
 src/core/components/name.js | 98 ++---
 src/core/components/object.js | 388 ++++++------------
 src/core/components/pin.js | 347 ++++++++--------
 src/core/components/pin/gc-lock.js | 12 +-
 src/core/components/pin/gc.js | 173 ++++----
 src/core/components/pin/pin-manager.js | 329 +++++++--------
 src/core/components/pin/pin-set.js | 242 ++++++-----
 src/core/components/pre-start.js | 10 +-
 src/core/components/pubsub.js | 90 ++--
 src/core/components/repo.js | 30 +-
 src/core/components/resolve.js | 7 +-
 src/core/components/start.js | 52 +--
 src/core/components/stats.js | 79 ++--
 src/core/components/stop.js | 28 +-
 src/core/components/swarm.js | 42 +-
 src/core/components/version.js | 25 +-
 src/core/index.js | 1 -
 src/core/ipns/index.js | 4 +-
 src/core/ipns/path.js | 12 +-
 src/core/ipns/publisher.js | 10 +-
 src/core/ipns/republisher.js | 180 ++++----
 src/core/ipns/resolver.js | 157 ++++---
 src/core/ipns/routing/offline-datastore.js | 101 +++--
 src/core/ipns/routing/pubsub-datastore.js | 83 ++--
 src/core/mfs-preload.js | 54 ++-
 src/core/runtime/add-from-fs-browser.js | 2 +-
 src/core/runtime/add-from-fs-nodejs.js | 2 +-
 src/core/runtime/dns-browser.js | 13 +-
 src/core/runtime/dns-nodejs.js | 91 ++--
 src/core/runtime/ipld-browser-all.js | 4 +-
 src/core/runtime/ipld-browser.js | 4 +-
 src/core/runtime/ipld-nodejs.js | 4 +-
 src/core/runtime/preload-nodejs.js | 2 +-
 src/core/utils.js | 63 ++-
 src/http/api/resources/bitswap.js | 6 +-
 src/http/api/resources/block.js | 4 +-
 src/http/api/resources/dag.js | 10 +-
 src/http/api/resources/files-regular.js | 4 +-
 src/http/api/resources/object.js | 18 +-
 src/http/api/resources/pin.js | 6 +-
 src/http/api/resources/resolve.js | 2 +-
 src/utils/mutex.js | 30 +-
 test/bootstrapers.js | 6 +-
 test/cli/bitswap.js | 5 -
 test/cli/daemon.js | 8 +-
 test/cli/dht.js | 5 +-
 test/cli/name-pubsub.js | 13 +-
 test/cli/ping.js | 5 +-
 test/cli/pubsub.js | 10 +-
 test/cli/swarm.js | 5 +-
 test/core/bitswap.spec.js | 28 +-
 test/core/block.spec.js | 7 +-
 test/core/bootstrap.spec.js | 5 +-
 test/core/circuit-relay.js | 6 +-
 test/core/dag.spec.js | 5 +-
 test/core/dht.spec.js | 5 +-
 test/core/files-sharding.spec.js | 5 +-
 test/core/files.spec.js | 5 +-
 test/core/gc-lock.spec.js | 83 ++--
 test/core/gc.spec.js | 24 +-
 test/core/init.spec.js | 50 +--
 test/core/kad-dht.node.js | 5 +-
 test/core/mfs-preload.spec.js | 52 ++-
 test/core/name-pubsub.js | 12 +-
 test/core/name.spec.js | 188
++++----- test/core/object.spec.js | 5 +- test/core/pin-set.js | 13 +- test/core/pin.js | 81 +++- test/core/pin.spec.js | 5 +- test/core/ping.spec.js | 6 +- test/core/stats.spec.js | 5 +- test/core/swarm.spec.js | 5 +- test/core/utils.js | 74 ++-- test/http-api/block.js | 5 +- test/http-api/bootstrap.js | 5 +- test/http-api/config.js | 5 +- test/http-api/dns.js | 5 +- test/http-api/files.js | 5 +- test/http-api/id.js | 5 +- test/node.js | 6 +- test/utils/create-repo-browser.js | 20 +- test/utils/create-repo-nodejs.js | 16 +- test/utils/interface-common-factory.js | 3 +- test/utils/ipfs-exec.js | 17 +- test/utils/on-and-off.js | 9 +- 110 files changed, 2052 insertions(+), 2377 deletions(-) diff --git a/.aegir.js b/.aegir.js index da26b81e79..fa929fea50 100644 --- a/.aegir.js +++ b/.aegir.js @@ -10,7 +10,7 @@ const preloadNode = MockPreloadNode.createNode() const echoServer = EchoServer.createServer() module.exports = { - bundlesize: { maxSize: '696kB' }, + bundlesize: { maxSize: '683kB' }, webpack: { resolve: { mainFields: ['browser', 'main'], diff --git a/.travis.yml b/.travis.yml index 208c521257..0250f38df7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,6 +28,13 @@ env: # https://travis-ci.community/t/timeout-after-build-finished-and-succeeded/1336 - YARN_GPG=no +addons: + apt: + packages: + # Fixes error while loading shared libraries: libgconf-2.so.4: cannot open shared object file: No such file or directory + # https://github.com/electron/electron/issues/1518 + - libgconf-2-4 + script: npx nyc -s npx aegir test -t node --timeout 10000 --bail after_success: - npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov diff --git a/examples/traverse-ipld-graphs/create-node.js b/examples/traverse-ipld-graphs/create-node.js index dd37f865ef..ecd7311c56 100644 --- a/examples/traverse-ipld-graphs/create-node.js +++ b/examples/traverse-ipld-graphs/create-node.js @@ -4,7 +4,8 @@ const IPFS = require('../../src/core') // In your project, replace by the following line and install IPFS as a dep // const IPFS = require('ipfs') -function createNode (options = {}) { +function createNode (options) { + options = options || {} options.path = options.path || '/tmp/ipfs' + Math.random() return IPFS.create({ repo: options.path }) } diff --git a/examples/traverse-ipld-graphs/eth.js b/examples/traverse-ipld-graphs/eth.js index 55f4566fe0..10582eee2f 100644 --- a/examples/traverse-ipld-graphs/eth.js +++ b/examples/traverse-ipld-graphs/eth.js @@ -6,7 +6,6 @@ const multihashing = require('multihashing-async') const Block = require('ipfs-block') const CID = require('cids') const fs = require('fs').promises -const { promisify } = require('util') async function main () { const ipfs = await createNode() @@ -20,7 +19,7 @@ async function main () { for (const ethBlockPath of ethBlocks) { const data = await fs.readFile(ethBlockPath) - const multihash = await promisify(multihashing)(data, 'keccak-256') + const multihash = await multihashing(data, 'keccak-256') const cid = new CID(1, 'eth-block', multihash) // console.log(cid.toBaseEncodedString()) diff --git a/examples/traverse-ipld-graphs/git.js b/examples/traverse-ipld-graphs/git.js index 20881b92a9..c3f78ff796 100644 --- a/examples/traverse-ipld-graphs/git.js +++ b/examples/traverse-ipld-graphs/git.js @@ -6,7 +6,6 @@ const multihashing = require('multihashing-async') const Block = require('ipfs-block') const CID = require('cids') const fs = require('fs').promises -const { promisify } = require('util') async function main () { const ipfs = await 
createNode() @@ -28,7 +27,7 @@ async function main () { await Promise.all(gitObjects.map(async gitObjectsPath => { const data = await fs.readFile(gitObjectsPath) - const multihash = await promisify(multihashing)(data, 'sha1') + const multihash = await multihashing(data, 'sha1') const cid = new CID(1, 'git-raw', multihash) console.log(cid.toString()) diff --git a/package.json b/package.json index 962f70fe1d..e2c79bd52e 100644 --- a/package.json +++ b/package.json @@ -63,7 +63,7 @@ "@hapi/ammo": "^3.1.1", "@hapi/boom": "^7.4.3", "@hapi/hapi": "^18.3.2", - "@hapi/joi": "^15.0.1", + "@hapi/joi": "^15.0.0", "array-shuffle": "^1.0.1", "async": "^2.6.1", "async-iterator-all": "^1.0.0", @@ -76,11 +76,13 @@ "bs58": "^4.0.1", "buffer-peek-stream": "^1.0.1", "byteman": "^1.3.5", + "callbackify": "^1.1.0", "cid-tool": "~0.3.0", "cids": "~0.7.1", "class-is": "^1.1.0", + "dag-cbor-links": "^1.3.0", "datastore-core": "~0.7.0", - "datastore-pubsub": "~0.1.1", + "datastore-pubsub": "^0.2.1", "debug": "^4.1.0", "dlv": "^1.1.3", "err-code": "^2.0.0", @@ -94,14 +96,14 @@ "hashlru": "^2.3.0", "human-to-milliseconds": "^2.0.0", "interface-datastore": "~0.7.0", - "ipfs-bitswap": "~0.25.1", + "ipfs-bitswap": "^0.26.0", "ipfs-block": "~0.8.1", "ipfs-block-service": "~0.16.0", - "ipfs-http-client": "^37.0.2", + "ipfs-http-client": "^38.0.0", "ipfs-http-response": "~0.3.1", "ipfs-mfs": "^0.13.0", "ipfs-multipart": "^0.2.0", - "ipfs-repo": "~0.27.0", + "ipfs-repo": "^0.28.0", "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "^0.38.0", "ipfs-unixfs-importer": "^0.40.0", @@ -114,7 +116,7 @@ "ipld-git": "~0.5.0", "ipld-raw": "^4.0.0", "ipld-zcash": "~0.3.0", - "ipns": "~0.6.0", + "ipns": "^0.6.1", "is-domain-name": "^1.0.1", "is-ipfs": "~0.6.1", "is-pull-stream": "~0.0.0", @@ -124,41 +126,43 @@ "it-to-stream": "^0.1.1", "just-safe-set": "^2.1.0", "kind-of": "^6.0.2", - "ky": "~0.14.0", + "ky": "^0.14.0", "ky-universal": "~0.3.0", - "libp2p": "~0.26.1", - "libp2p-bootstrap": "~0.10.2", - "libp2p-crypto": "~0.17.0", + "libp2p": "^0.26.2", + "libp2p-bootstrap": "~0.9.3", + "libp2p-crypto": "^0.16.2", "libp2p-delegated-content-routing": "^0.3.1", "libp2p-delegated-peer-routing": "^0.3.1", "libp2p-floodsub": "^0.18.0", "libp2p-gossipsub": "~0.0.5", "libp2p-kad-dht": "~0.16.0", - "libp2p-keychain": "~0.5.0", + "libp2p-keychain": "^0.5.1", "libp2p-mdns": "~0.12.0", "libp2p-record": "~0.7.0", "libp2p-secio": "~0.11.0", - "libp2p-tcp": "~0.13.1", + "libp2p-tcp": "^0.13.0", "libp2p-webrtc-star": "~0.16.0", "libp2p-websocket-star-multi": "~0.4.3", "libp2p-websockets": "~0.12.3", "lodash": "^4.17.15", + "lodash.flatten": "^4.4.0", "mafmt": "^6.0.10", "merge-options": "^1.0.1", "mime-types": "^2.1.21", "mkdirp": "~0.5.1", "mortice": "^2.0.0", - "multiaddr": "^6.1.0", + "multiaddr": "^6.1.1", "multiaddr-to-uri": "^5.0.0", "multibase": "~0.6.0", "multicodec": "~0.5.5", "multihashes": "~0.4.14", - "multihashing-async": "~0.7.0", + "multihashing-async": "^0.8.0", "node-fetch": "^2.3.0", + "p-iteration": "^1.1.8", "p-queue": "^6.1.0", - "peer-book": "~0.9.0", - "peer-id": "~0.13.2", - "peer-info": "~0.16.0", + "peer-book": "^0.9.1", + "peer-id": "^0.12.2", + "peer-info": "~0.15.1", "progress": "^2.0.1", "promise-nodeify": "^3.0.1", "promisify-es6": "^1.0.3", @@ -200,7 +204,7 @@ "execa": "^2.0.4", "form-data": "^2.5.1", "hat": "0.0.3", - "interface-ipfs-core": "~0.114.0", + "interface-ipfs-core": "^0.115.0", "ipfs-interop": "~0.1.0", "ipfsd-ctl": "^0.47.2", "libp2p-websocket-star": "~0.10.2", diff --git a/src/cli/utils.js 
b/src/cli/utils.js index 14c00cb038..842acd3d61 100644 --- a/src/cli/utils.js +++ b/src/cli/utils.js @@ -55,7 +55,7 @@ exports.getIPFS = (argv, callback) => { const cleanup = callbackify(async () => { if (node && node._repo && !node._repo.closed) { - return node._repo.close() + await node._repo.close() } }) diff --git a/src/core/boot.js b/src/core/boot.js index 96fabbe48f..aa47be16af 100644 --- a/src/core/boot.js +++ b/src/core/boot.js @@ -1,86 +1,72 @@ 'use strict' -const waterfall = require('async/waterfall') const RepoErrors = require('ipfs-repo').errors // Boot an IPFS node depending on the options set -module.exports = (self) => { +module.exports = async (self) => { self.log('booting') const options = self._options const doInit = options.init const doStart = options.start - // Do the actual boot sequence - waterfall([ - // Checks if a repo exists, and if so opens it - // Will return callback with a bool indicating the existence - // of the repo - // TODO vmx 2019-08-05: THIS WON'T WORK IN THE BROWSER due to transpiling, this needs a proper fix. This is just a hack to keep things moving - async () => { - // nothing to do - if (!self._repo.closed) { - return true - } + // Checks if a repo exists, and if so opens it + // Will return callback with a bool indicating the existence + // of the repo + async function repoOpened () { + // nothing to do + if (!self._repo.closed) { + return true + } - try { - const res = await self._repo.open() - } catch (err) { - if (isRepoUninitializedError(err)) return false - if (err) throw err + try { + await self._repo.open() + } catch (err) { + if (isRepoUninitializedError(err)) { + return false } - return true - }, - (repoOpened, cb) => { - // Init with existing initialized, opened, repo - if (repoOpened) { - return self.init({ repo: self._repo }, (err) => { - if (err) return cb(Object.assign(err, { emitted: true })) - cb() - }) + if (err) { + throw err } + } - if (doInit) { - const initOptions = Object.assign( - { bits: 2048, pass: self._options.pass }, - typeof options.init === 'object' ? options.init : {} - ) - return self.init(initOptions, (err) => { - if (err) return cb(Object.assign(err, { emitted: true })) - cb() - }) - } + return true + } - cb() - }, - (cb) => { - // No problem, we don't have to start the node - if (!doStart) { - return cb() + // Do the actual boot sequence + try { + // Init with existing initialized, opened, repo + if (await repoOpened()) { + try { + await self.init({ repo: self._repo }) + } catch (err) { + throw Object.assign(err, { emitted: true }) + } + } else if (doInit) { + const defaultInitOptions = { + bits: 2048, + pass: self._options.pass } - self.start((err) => { - if (err) return cb(Object.assign(err, { emitted: true })) - cb() - }) + const initOptions = Object.assign(defaultInitOptions, typeof options.init === 'object' ? 
options.init : {}) + + await self.init(initOptions) } - ], (err) => { - if (err) { - if (!err.emitted) { - self.emit('error', err) - } - return + + if (doStart) { + await self.start() } + self.log('booted') self.emit('ready') - }) + } catch (err) { + if (!err.emitted) { + self.emit('error', err) + } + } } function isRepoUninitializedError (err) { - if (!err) { - return false - } - // If the error is that no repo exists, // which happens when the version file is not found // we just want to signal that no repo exist, not diff --git a/src/core/components/bitswap.js b/src/core/components/bitswap.js index c25bebacc3..654f9f045b 100644 --- a/src/core/components/bitswap.js +++ b/src/core/components/bitswap.js @@ -1,8 +1,7 @@ 'use strict' const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const promisify = require('promisify-es6') -const setImmediate = require('async/setImmediate') +const callbackify = require('callbackify') const Big = require('bignumber.js') const CID = require('cids') const PeerId = require('peer-id') @@ -14,59 +13,47 @@ function formatWantlist (list, cidBase) { module.exports = function bitswap (self) { return { - wantlist: promisify((peerId, callback) => { - if (typeof peerId === 'function') { - callback = peerId - peerId = null - } - + wantlist: callbackify.variadic(async (peerId) => { // eslint-disable-line require-await if (!self.isOnline()) { - return setImmediate(() => callback(new Error(OFFLINE_ERROR))) + throw new Error(OFFLINE_ERROR) } let list + if (peerId) { - try { - peerId = PeerId.createFromB58String(peerId) - } catch (e) { - peerId = null - } - if (!peerId) { - return setImmediate(() => callback(new Error('Invalid peerId'))) - } + peerId = PeerId.createFromB58String(peerId) + list = self._bitswap.wantlistForPeer(peerId) } else { list = self._bitswap.getWantlist() } - setImmediate(() => callback(null, { Keys: formatWantlist(list) })) + return { Keys: formatWantlist(list) } }), - stat: promisify((callback) => { + stat: callbackify(async () => { // eslint-disable-line require-await if (!self.isOnline()) { - return setImmediate(() => callback(new Error(OFFLINE_ERROR))) + throw new Error(OFFLINE_ERROR) } const snapshot = self._bitswap.stat().snapshot - setImmediate(() => { - callback(null, { - provideBufLen: parseInt(snapshot.providesBufferLength.toString()), - blocksReceived: new Big(snapshot.blocksReceived), - wantlist: formatWantlist(self._bitswap.getWantlist()), - peers: self._bitswap.peers().map((id) => id.toB58String()), - dupBlksReceived: new Big(snapshot.dupBlksReceived), - dupDataReceived: new Big(snapshot.dupDataReceived), - dataReceived: new Big(snapshot.dataReceived), - blocksSent: new Big(snapshot.blocksSent), - dataSent: new Big(snapshot.dataSent) - }) - }) + return { + provideBufLen: parseInt(snapshot.providesBufferLength.toString()), + blocksReceived: new Big(snapshot.blocksReceived), + wantlist: formatWantlist(self._bitswap.getWantlist()), + peers: self._bitswap.peers().map((id) => id.toB58String()), + dupBlksReceived: new Big(snapshot.dupBlksReceived), + dupDataReceived: new Big(snapshot.dupDataReceived), + dataReceived: new Big(snapshot.dataReceived), + blocksSent: new Big(snapshot.blocksSent), + dataSent: new Big(snapshot.dataSent) + } }), - unwant: promisify((keys, callback) => { + unwant: callbackify(async (keys) => { // eslint-disable-line require-await if (!self.isOnline()) { - return setImmediate(() => callback(new Error(OFFLINE_ERROR))) + throw new Error(OFFLINE_ERROR) } if (!Array.isArray(keys)) { @@ -74,17 +61,12 @@ module.exports = 
function bitswap (self) { } try { - keys = keys.map((key) => { - if (CID.isCID(key)) { - return key - } - return new CID(key) - }) + keys = keys.map((key) => new CID(key)) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } - setImmediate(() => callback(null, self._bitswap.unwant(keys))) + return self._bitswap.unwant(keys) }) } } diff --git a/src/core/components/block.js b/src/core/components/block.js index 305ad2df3d..205dcedcdc 100644 --- a/src/core/components/block.js +++ b/src/core/components/block.js @@ -3,61 +3,40 @@ const Block = require('ipfs-block') const multihashing = require('multihashing-async') const CID = require('cids') -const waterfall = require('async/waterfall') -const setImmediate = require('async/setImmediate') -const promisify = require('promisify-es6') +const callbackify = require('callbackify') const errCode = require('err-code') module.exports = function block (self) { return { - get: promisify((cid, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + get: callbackify.variadic(async (cid, options) => { // eslint-disable-line require-await options = options || {} try { cid = cleanCid(cid) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } if (options.preload !== false) { self._preload(cid) } - self._blockService.get(cid, callback) + return self._blockService.get(cid) }), - put: promisify((block, options, callback) => { - callback = callback || function noop () {} - - if (typeof options === 'function') { - callback = options - options = {} - } - + put: callbackify.variadic(async (block, options) => { options = options || {} if (Array.isArray(block)) { - return callback(new Error('Array is not supported')) + throw new Error('Array is not supported') } - waterfall([ - (cb) => { - if (Block.isBlock(block)) { - return cb(null, block) - } - - if (options.cid && CID.isCID(options.cid)) { - return cb(null, new Block(block, options.cid)) - } - + if (!Block.isBlock(block)) { + if (options.cid && CID.isCID(options.cid)) { + block = new Block(block, options.cid) + } else { const mhtype = options.mhtype || 'sha2-256' const format = options.format || 'dag-pb' let cidVersion - // const mhlen = options.mhlen || 0 if (options.version == null) { // Pick appropriate CID version @@ -66,74 +45,63 @@ module.exports = function block (self) { cidVersion = options.version } - multihashing(block, mhtype, (err, multihash) => { - if (err) { - return cb(err) - } - - let cid - try { - cid = new CID(cidVersion, format, multihash) - } catch (err) { - return cb(err) - } - - cb(null, new Block(block, cid)) - }) - }, - (block, cb) => self._gcLock.readLock((_cb) => { - self._blockService.put(block, (err) => { - if (err) { - return _cb(err) - } - - if (options.preload !== false) { - self._preload(block.cid) - } - - _cb(null, block) - }) - }, cb) - ], callback) + const multihash = await multihashing(block, mhtype) + const cid = new CID(cidVersion, format, multihash) + + block = new Block(block, cid) + } + } + + const release = await self._gcLock.readLock() + + try { + await self._blockService.put(block) + + if (options.preload !== false) { + self._preload(block.cid) + } + + return block + } finally { + release() + } }), - rm: promisify((cid, callback) => { + rm: callbackify(async (cid) => { try { cid = cleanCid(cid) } catch (err) { - return setImmediate(() => callback(errCode(err, 
'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } // We need to take a write lock here to ensure that adding and removing // blocks are exclusive operations - self._gcLock.writeLock((cb) => self._blockService.delete(cid, cb), callback) - }), - stat: promisify((cid, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } + const release = await self._gcLock.writeLock() + try { + await self._blockService.delete(cid) + } finally { + release() + } + }), + stat: callbackify.variadic(async (cid, options) => { options = options || {} try { cid = cleanCid(cid) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } if (options.preload !== false) { self._preload(cid) } - self._blockService.get(cid, (err, block) => { - if (err) { - return callback(err) - } - callback(null, { - key: cid.toString(), - size: block.data.length - }) - }) + const block = await self._blockService.get(cid) + + return { + key: cid.toString(), + size: block.data.length + } }) } } diff --git a/src/core/components/bootstrap.js b/src/core/components/bootstrap.js index f453636433..156ecc1134 100644 --- a/src/core/components/bootstrap.js +++ b/src/core/components/bootstrap.js @@ -20,14 +20,15 @@ module.exports = function bootstrap (self) { return { list: callbackify(async () => { const config = await self._repo.config.get() + return { Peers: config.Bootstrap } }), - add: callbackify(async (multiaddr, args = { default: false }) => { + add: callbackify.variadic(async (multiaddr, args = { default: false }) => { if (multiaddr && !isValidMultiaddr(multiaddr)) { throw invalidMultiaddrError(multiaddr) } - const config = self._repo.config.get() + const config = await self._repo.config.get() if (args.default) { config.Bootstrap = defaultConfig().Bootstrap } else if (multiaddr && config.Bootstrap.indexOf(multiaddr) === -1) { @@ -39,7 +40,7 @@ module.exports = function bootstrap (self) { Peers: args.default ? 
defaultConfig().Bootstrap : [multiaddr] } }), - rm: callbackify(async (multiaddr, args = { all: false }) => { + rm: callbackify.variadic(async (multiaddr, args = { all: false }) => { if (multiaddr && !isValidMultiaddr(multiaddr)) { throw invalidMultiaddrError(multiaddr) } diff --git a/src/core/components/config.js b/src/core/components/config.js index 7206eab268..23d31a1d8a 100644 --- a/src/core/components/config.js +++ b/src/core/components/config.js @@ -4,8 +4,8 @@ const callbackify = require('callbackify') module.exports = function config (self) { return { - get: callbackify(self._repo.config.get), + get: callbackify.variadic(self._repo.config.get), set: callbackify(self._repo.config.set), - replace: callbackify(self._repo.config.set) + replace: callbackify.variadic(self._repo.config.set) } } diff --git a/src/core/components/dag.js b/src/core/components/dag.js index 0f2f90a563..af0d745e2c 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -1,27 +1,20 @@ 'use strict' -const promisify = require('promisify-es6') +const callbackify = require('callbackify') const CID = require('cids') -const pull = require('pull-stream') -const iterToPull = require('async-iterator-to-pull-stream') -const setImmediate = require('async/setImmediate') +const all = require('async-iterator-all') const errCode = require('err-code') const multicodec = require('multicodec') module.exports = function dag (self) { return { - put: promisify((dagNode, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + put: callbackify.variadic(async (dagNode, options) => { options = options || {} if (options.cid && (options.format || options.hashAlg)) { - return callback(new Error('Can\'t put dag node. Please provide either `cid` OR `format` and `hashAlg` options.')) + throw new Error('Can\'t put dag node. Please provide either `cid` OR `format` and `hashAlg` options.') } else if (((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { - return callback(new Error('Can\'t put dag node. Please provide `format` AND `hashAlg` options.')) + throw new Error('Can\'t put dag node. 
Please provide `format` AND `hashAlg` options.') } const optionDefaults = { @@ -51,47 +44,34 @@ module.exports = function dag (self) { } } - self._ipld.put(dagNode, options.format, { + const cid = await self._ipld.put(dagNode, options.format, { hashAlg: options.hashAlg, cidVersion: options.version - }).then( - (cid) => { - if (options.preload !== false) { - self._preload(cid) - } - return callback(null, cid) - }, - (error) => callback(error) - ) - }), + }) - get: promisify((cid, path, options, callback) => { - if (typeof path === 'function') { - callback = path - path = undefined + if (options.preload !== false) { + self._preload(cid) } - if (typeof options === 'function') { - callback = options - - // Allow options in path position - if (typeof path !== 'string') { - options = path - path = undefined - } else { - options = {} - } - } + return cid + }), + get: callbackify.variadic(async (cid, path, options) => { options = options || {} + // Allow options in path position + if (path !== undefined && typeof path !== 'string') { + options = path + path = undefined + } + if (typeof cid === 'string') { const split = cid.split('/') try { cid = new CID(split[0]) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } split.shift() @@ -105,7 +85,7 @@ module.exports = function dag (self) { try { cid = new CID(cid) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } } @@ -114,51 +94,43 @@ module.exports = function dag (self) { } if (path == null || path === '/') { - self._ipld.get(cid).then( - (value) => { - callback(null, { - value, - remainderPath: '' - }) - }, - (error) => callback(error) - ) + const value = await self._ipld.get(cid) + + return { + value, + remainderPath: '' + } } else { - const result = self._ipld.resolve(cid, path) - const promisedValue = options.localResolve ? 
result.first() : result.last() - promisedValue.then( - (value) => callback(null, value), - (error) => callback(error) - ) - } - }), + let result - tree: promisify((cid, path, options, callback) => { - if (typeof path === 'object') { - callback = options - options = path - path = undefined - } + for await (const entry of self._ipld.resolve(cid, path)) { + if (options.localResolve) { + return entry + } - if (typeof path === 'function') { - callback = path - path = undefined - } + result = entry + } - if (typeof options === 'function') { - callback = options - options = {} + return result } + }), + tree: callbackify.variadic(async (cid, path, options) => { // eslint-disable-line require-await options = options || {} + // Allow options in path position + if (path !== undefined && typeof path !== 'string') { + options = path + path = undefined + } + if (typeof cid === 'string') { const split = cid.split('/') try { cid = new CID(split[0]) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } split.shift() @@ -174,10 +146,7 @@ module.exports = function dag (self) { self._preload(cid) } - pull( - iterToPull(self._ipld.tree(cid, path, options)), - pull.collect(callback) - ) + return all(self._ipld.tree(cid, path, options)) }) } } diff --git a/src/core/components/dht.js b/src/core/components/dht.js index 224e9dc53a..78ca7fbf6d 100644 --- a/src/core/components/dht.js +++ b/src/core/components/dht.js @@ -1,16 +1,11 @@ 'use strict' -const promisify = require('promisify-es6') +const callbackify = require('callbackify') const PeerId = require('peer-id') const PeerInfo = require('peer-info') const CID = require('cids') -const each = require('async/each') -const nextTick = require('async/nextTick') const { every, forEach } = require('p-iteration') -const callbackify = require('callbackify') - const errcode = require('err-code') - const debug = require('debug') const log = debug('ipfs:dht') log.error = debug('ipfs:dht:error') @@ -26,12 +21,7 @@ module.exports = (self) => { * @param {function(Error)} [callback] * @returns {Promise|void} */ - get: promisify((key, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + get: callbackify.variadic(async (key, options) => { // eslint-disable-line require-await options = options || {} if (!Buffer.isBuffer(key)) { @@ -40,11 +30,11 @@ module.exports = (self) => { } catch (err) { log.error(err) - return nextTick(() => callback(errcode(err, 'ERR_INVALID_CID'))) + throw errcode(err, 'ERR_INVALID_CID') } } - self.libp2p.dht.get(key, options, callback) + return self.libp2p.dht.get(key, options) }), /** @@ -59,18 +49,18 @@ module.exports = (self) => { * @param {function(Error)} [callback] * @returns {Promise|void} */ - put: promisify((key, value, callback) => { + put: callbackify(async (key, value) => { // eslint-disable-line require-await if (!Buffer.isBuffer(key)) { try { key = (new CID(key)).buffer } catch (err) { log.error(err) - return nextTick(() => callback(errcode(err, 'ERR_INVALID_CID'))) + throw errcode(err, 'ERR_INVALID_CID') } } - self.libp2p.dht.put(key, value, callback) + return self.libp2p.dht.put(key, value) }), /** @@ -83,12 +73,7 @@ module.exports = (self) => { * @param {function(Error, Array)} [callback] * @returns {Promise|void} */ - findProvs: promisify((key, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + findProvs: callbackify.variadic(async (key, options) => { // 
eslint-disable-line require-await options = options || {} if (typeof key === 'string') { @@ -97,11 +82,11 @@ module.exports = (self) => { } catch (err) { log.error(err) - return nextTick(() => callback(errcode(err, 'ERR_INVALID_CID'))) + throw errcode(err, 'ERR_INVALID_CID') } } - self.libp2p.contentRouting.findProviders(key, options, callback) + return self.libp2p.contentRouting.findProviders(key, options) }), /** @@ -111,12 +96,12 @@ module.exports = (self) => { * @param {function(Error, PeerInfo)} [callback] * @returns {Promise|void} */ - findPeer: promisify((peer, callback) => { + findPeer: callbackify(async (peer) => { // eslint-disable-line require-await if (typeof peer === 'string') { peer = PeerId.createFromB58String(peer) } - self.libp2p.peerRouting.findPeer(peer, callback) + return self.libp2p.peerRouting.findPeer(peer) }), /** @@ -128,19 +113,15 @@ module.exports = (self) => { * @param {function(Error)} [callback] * @returns {Promise|void} */ - provide: callbackify(async (keys, options) => { + provide: callbackify.variadic(async (keys, options) => { + options = options || {} + if (!Array.isArray(keys)) { keys = [keys] } - if (typeof options === 'function') { - callback = options - options = {} - } - - options = options || {} // ensure blocks are actually local - const has = await every(keys, async (key) => { + const has = await every(keys, (key) => { return self._repo.blocks.has(key) }) @@ -155,9 +136,7 @@ module.exports = (self) => { // TODO: Implement recursive providing throw errcode('not implemented yet', 'ERR_NOT_IMPLEMENTED_YET') } else { - forEach(keys, (cid) => { - self.libp2p.contentRouting.provide(cid) - }) + await forEach(keys, (cid) => self.libp2p.contentRouting.provide(cid)) } }), @@ -168,25 +147,27 @@ module.exports = (self) => { * @param {function(Error, Array)} [callback] * @returns {Promise>|void} */ - query: promisify((peerId, callback) => { + query: callbackify(async (peerId) => { if (typeof peerId === 'string') { try { peerId = PeerId.createFromB58String(peerId) } catch (err) { log.error(err) - return callback(err) + + throw err } } - // TODO expose this method in peerRouting - self.libp2p._dht.getClosestPeers(peerId.toBytes(), (err, peerIds) => { - if (err) { - log.error(err) - return callback(err) - } + try { + // TODO expose this method in peerRouting + const peerIds = await self.libp2p._dht.getClosestPeers(peerId.toBytes()) - callback(null, peerIds.map((id) => new PeerInfo(id))) - }) + return peerIds.map((id) => new PeerInfo(id)) + } catch (err) { + log.error(err) + + throw err + } }) } } diff --git a/src/core/components/dns.js b/src/core/components/dns.js index 7a083fdd6c..380be30329 100644 --- a/src/core/components/dns.js +++ b/src/core/components/dns.js @@ -2,7 +2,7 @@ // dns-nodejs gets replaced by dns-browser when webpacked/browserified const dns = require('../runtime/dns-nodejs') -const promisify = require('promisify-es6') +const callbackify = require('callbackify') function fqdnFixups (domain) { // Allow resolution of .eth names via .eth.link @@ -14,19 +14,15 @@ function fqdnFixups (domain) { } module.exports = () => { - return promisify((domain, opts, callback) => { - if (typeof domain !== 'string') { - return callback(new Error('Invalid arguments, domain must be a string')) - } + return callbackify.variadic(async (domain, opts) => { // eslint-disable-line require-await + opts = opts || {} - if (typeof opts === 'function') { - callback = opts - opts = {} + if (typeof domain !== 'string') { + throw new Error('Invalid arguments, domain must be a 
string') } - opts = opts || {} domain = fqdnFixups(domain) - dns(domain, opts, callback) + return dns(domain, opts) }) } diff --git a/src/core/components/files-mfs.js b/src/core/components/files-mfs.js index 0bdb95890c..77f2eed0ab 100644 --- a/src/core/components/files-mfs.js +++ b/src/core/components/files-mfs.js @@ -18,7 +18,9 @@ const { cidToString } = require('../../utils/cid') * @typedef { import("pull-stream") } PullStream */ -const mapLsFile = (options = {}) => { +const mapLsFile = (options) => { + options = options || {} + const long = options.long || options.l return (file) => { diff --git a/src/core/components/files-regular/add-from-url.js b/src/core/components/files-regular/add-from-url.js index c9207c98cc..bc12850cbe 100644 --- a/src/core/components/files-regular/add-from-url.js +++ b/src/core/components/files-regular/add-from-url.js @@ -5,14 +5,15 @@ const nodeify = require('promise-nodeify') const { default: ky } = require('ky-universal') module.exports = (ipfs) => { - const addFromURL = async (url, opts = {}) => { + const addFromURL = async (url, opts) => { + opts = opts || {} const res = await ky.get(url) const path = decodeURIComponent(new URL(res.url).pathname.split('/').pop()) const content = Buffer.from(await res.arrayBuffer()) return ipfs.add({ content, path }, opts) } - return (name, opts = {}, cb) => { + return (name, opts, cb) => { if (typeof opts === 'function') { cb = opts opts = {} diff --git a/src/core/components/files-regular/refs-local-pull-stream.js b/src/core/components/files-regular/refs-local-pull-stream.js index 05ce98b627..77c396f58f 100644 --- a/src/core/components/files-regular/refs-local-pull-stream.js +++ b/src/core/components/files-regular/refs-local-pull-stream.js @@ -2,23 +2,15 @@ const CID = require('cids') const base32 = require('base32.js') -const pull = require('pull-stream') -const pullDefer = require('pull-defer') +const itToPull = require('async-iterator-to-pull-stream') module.exports = function (self) { return () => { - const deferred = pullDefer.source() - - self._repo.blocks.query({ keysOnly: true }).then( - (blocks) => { - const refs = blocks.map(b => dsKeyToRef(b.key)) - deferred.resolve(pull.values(refs)) - }, (err) => { - deferred.resolve(pull.error(err)) + return itToPull((async function * () { + for await (const result of self._repo.blocks.query({ keysOnly: true })) { + yield dsKeyToRef(result.key) } - ) - - return deferred + })()) } } diff --git a/src/core/components/files-regular/refs-pull-stream.js b/src/core/components/files-regular/refs-pull-stream.js index aec4ba8a6d..3b2d6f8ed1 100644 --- a/src/core/components/files-regular/refs-pull-stream.js +++ b/src/core/components/files-regular/refs-pull-stream.js @@ -11,7 +11,9 @@ const { normalizePath } = require('./utils') const { Format } = require('./refs') module.exports = function (self) { - return function (ipfsPath, options = {}) { + return function (ipfsPath, options) { + options = options || {} + if (options.maxDepth === 0) { return pull.empty() } diff --git a/src/core/components/files-regular/utils.js b/src/core/components/files-regular/utils.js index 4d4f460497..bd5afc5fd2 100644 --- a/src/core/components/files-regular/utils.js +++ b/src/core/components/files-regular/utils.js @@ -101,7 +101,9 @@ const parseChunkSize = (str, name) => { return size } -const mapFile = (options = {}) => { +const mapFile = (options) => { + options = options || {} + return (file) => { let size = 0 let type = 'dir' diff --git a/src/core/components/id.js b/src/core/components/id.js index 
bd3c3eb9dd..a8fd75f92d 100644 --- a/src/core/components/id.js +++ b/src/core/components/id.js @@ -1,17 +1,11 @@ 'use strict' -const promisify = require('promisify-es6') -const setImmediate = require('async/setImmediate') +const callbackify = require('callbackify') const pkgversion = require('../../../package.json').version module.exports = function id (self) { - return promisify((opts, callback) => { - if (typeof opts === 'function') { - callback = opts - opts = {} - } - - setImmediate(() => callback(null, { + return callbackify(async () => { // eslint-disable-line require-await + return { id: self._peerInfo.id.toB58String(), publicKey: self._peerInfo.id.pubKey.bytes.toString('base64'), addresses: self._peerInfo.multiaddrs @@ -21,6 +15,6 @@ module.exports = function id (self) { .sort(), agentVersion: `js-ipfs/${pkgversion}`, protocolVersion: '9000' - })) + } }) } diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js index c9c1d5ec03..bd6e27d242 100644 --- a/src/core/components/init-assets.js +++ b/src/core/components/init-assets.js @@ -1,46 +1,19 @@ 'use strict' const path = require('path') -const glob = require('glob') -const pull = require('pull-stream') -const file = require('pull-file') const CID = require('cids') // Add the default assets to the repo. module.exports = async function addDefaultAssets (self, log) { const initDocsPath = path.join(__dirname, '../../init-files/init-docs') - const index = initDocsPath.lastIndexOf(path.sep) - console.info('adding', initDocsPath) - try { - const results = await self.addFromFs(initDocsPath, { - recursive: true - }) + const results = await self.addFromFs(initDocsPath, { + recursive: true + }) - console.info(results) - } catch (err) { - console.error(err) - } + const dir = results.filter(file => file.path === 'init-docs').pop() + const cid = new CID(dir.hash) - - - - /* - - pull.through(file => { - if (file.path === 'init-docs') { - const cid = new CID(file.hash) - log('to get started, enter:\n') - log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) - } - }), - pull.collect((err) => { - if (err) { - return callback(err) - } - - callback(null, true) - }) - ) - */ + log('to get started, enter:\n') + log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) } diff --git a/src/core/components/init.js b/src/core/components/init.js index edc622b87d..03e6caae13 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -3,6 +3,7 @@ const peerId = require('peer-id') const mergeOptions = require('merge-options') const callbackify = require('callbackify') +const promisify = require('promisify-es6') const defaultConfig = require('../runtime/config-nodejs.js') const Keychain = require('libp2p-keychain') const { @@ -16,20 +17,20 @@ const OfflineDatastore = require('../ipns/routing/offline-datastore') const addDefaultAssets = require('./init-assets') -async function createPeerId (self, opts) { +function createPeerId (self, opts) { if (opts.privateKey) { self.log('using user-supplied private-key') if (typeof opts.privateKey === 'object') { return opts.privateKey } else { - return peerId.createFromPrivKey(Buffer.from(opts.privateKey, 'base64')) + return promisify(peerId.createFromPrivKey)(Buffer.from(opts.privateKey, 'base64')) } } else { // Generate peer identity keypair + transform to desired format + add to config. 
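// Note on the promisify wrapping below: the peer-id release pinned elsewhere in
// this patch's package.json (^0.12.2) still exposes callback-style APIs, so the
// patch wraps its calls with promisify-es6 rather than awaiting them directly.
// A rough sketch of that pattern, assuming a node-style callback API:
//
//   const promisify = require('promisify-es6')
//   const createFromPrivKey = promisify(peerId.createFromPrivKey)
//   const id = await createFromPrivKey(privateKeyBuffer)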
opts.log(`generating ${opts.bits}-bit RSA keypair...`, false) self.log('generating peer id: %s bits', opts.bits) - return await peerId.create({ bits: opts.bits }) + return promisify(peerId.create)({ bits: opts.bits }) } } @@ -53,7 +54,6 @@ async function createRepo (self, opts) { opts.log = opts.log || function () {} const config = mergeOptions(defaultConfig(), self._options.config) - let privateKey // Verify repo does not exist yet const exists = await self._repo.exists() @@ -62,7 +62,7 @@ async function createRepo (self, opts) { throw Error('repo already exists') } - let peerId = await createPeerId(self, opts) + const peerId = await createPeerId(self, opts) self.log('identity generated') @@ -70,7 +70,7 @@ async function createRepo (self, opts) { PeerID: peerId.toB58String(), PrivKey: peerId.privKey.bytes.toString('base64') } - privateKey = peerId.privKey + const privateKey = peerId.privKey if (opts.pass) { config.Keychain = Keychain.generateOptions() @@ -100,7 +100,6 @@ async function createRepo (self, opts) { // add empty unixfs dir object (go-ipfs assumes this exists) return addRepoAssets(self, privateKey, opts) - } async function addRepoAssets (self, privateKey, opts) { @@ -123,7 +122,6 @@ async function addRepoAssets (self, privateKey, opts) { if (typeof addDefaultAssets === 'function') { self.log('Adding default assets') - // addDefaultAssets is undefined on browsers. // See package.json browser config return addDefaultAssets(self, opts.log) @@ -134,19 +132,13 @@ module.exports = function init (self) { return callbackify(async (opts) => { opts = opts || {} - try { - await createRepo(self, opts) - self.log('Created repo') - await self.preStart() + await createRepo(self, opts) + self.log('Created repo') - self.log('Done pre-start') + await self.preStart() + self.log('Done pre-start') - self.state.initialized() - self.emit('init') - } catch(err) { - self.emit('error', err) - - throw err - } + self.state.initialized() + self.emit('init') }) } diff --git a/src/core/components/key.js b/src/core/components/key.js index 97bb2973a3..0bc5147969 100644 --- a/src/core/components/key.js +++ b/src/core/components/key.js @@ -2,46 +2,45 @@ // See https://github.com/ipfs/specs/tree/master/keystore -const promisify = require('promisify-es6') +const callbackify = require('callbackify') module.exports = function key (self) { return { - gen: promisify((name, opts, callback) => { + gen: callbackify.variadic(async (name, opts) => { // eslint-disable-line require-await opts = opts || {} - self._keychain.createKey(name, opts.type, opts.size, callback) + + return self._keychain.createKey(name, opts.type, opts.size) }), - info: promisify((name, callback) => { - self._keychain.findKeyByName(name, callback) + info: callbackify(async (name) => { // eslint-disable-line require-await + return self._keychain.findKeyByName(name) }), - list: promisify((callback) => { - self._keychain.listKeys(callback) + list: callbackify(async () => { // eslint-disable-line require-await + return self._keychain.listKeys() }), - rm: promisify((name, callback) => { - self._keychain.removeKey(name, callback) + rm: callbackify(async (name) => { // eslint-disable-line require-await + return self._keychain.removeKey(name) }), - rename: promisify((oldName, newName, callback) => { - self._keychain.renameKey(oldName, newName, (err, key) => { - if (err) return callback(err) - const result = { - was: oldName, - now: key.name, - id: key.id, - overwrite: false - } - callback(null, result) - }) + rename: callbackify(async (oldName, newName) => { 
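// Unlike peer-id above, the keychain calls in this file are awaited directly;
// the bump to libp2p-keychain ^0.5.1 in this patch's package.json implies a
// promise-returning API. renameKey appears to resolve to the renamed key, whose
// name and id are mapped into the { was, now, id, overwrite } shape callers
// expect, e.g. (assuming promise-style invocation through callbackify):
//
//   const { was, now } = await ipfs.key.rename('old-name', 'new-name')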
+ const key = await self._keychain.renameKey(oldName, newName) + + return { + was: oldName, + now: key.name, + id: key.id, + overwrite: false + } }), - import: promisify((name, pem, password, callback) => { - self._keychain.importKey(name, pem, password, callback) + import: callbackify(async (name, pem, password) => { // eslint-disable-line require-await + return self._keychain.importKey(name, pem, password) }), - export: promisify((name, password, callback) => { - self._keychain.exportKey(name, password, callback) + export: callbackify(async (name, password) => { // eslint-disable-line require-await + return self._keychain.exportKey(name, password) }) } } diff --git a/src/core/components/name-pubsub.js b/src/core/components/name-pubsub.js index 13ce8962da..4fc4775713 100644 --- a/src/core/components/name-pubsub.js +++ b/src/core/components/name-pubsub.js @@ -2,7 +2,7 @@ const debug = require('debug') const errcode = require('err-code') -const promisify = require('promisify-es6') +const callbackify = require('callbackify') const IpnsPubsubDatastore = require('../ipns/routing/pubsub-datastore') @@ -46,10 +46,10 @@ module.exports = function namePubsub (self) { * * @returns {Promise|void} */ - state: promisify((callback) => { - callback(null, { + state: callbackify(async () => { // eslint-disable-line require-await + return { enabled: isNamePubsubEnabled(self) - }) + } }), /** * Cancel a name subscription. @@ -58,15 +58,10 @@ module.exports = function namePubsub (self) { * @param {function(Error)} [callback] * @returns {Promise|void} */ - cancel: promisify((name, callback) => { - let pubsub - try { - pubsub = getPubsubRouting(self) - } catch (err) { - return callback(err) - } + cancel: callbackify(async (name) => { // eslint-disable-line require-await + const pubsub = getPubsubRouting(self) - pubsub.cancel(name, callback) + return pubsub.cancel(name) }), /** * Show current name subscriptions. 
@@ -74,15 +69,10 @@ module.exports = function namePubsub (self) { * @param {function(Error)} [callback] * @returns {Promise|void} */ - subs: promisify((callback) => { - let pubsub - try { - pubsub = getPubsubRouting(self) - } catch (err) { - return callback(err) - } + subs: callbackify(async () => { // eslint-disable-line require-await + const pubsub = getPubsubRouting(self) - pubsub.getSubscriptions(callback) + return pubsub.getSubscriptions() }) } } diff --git a/src/core/components/name.js b/src/core/components/name.js index d6147e5da6..09dc5dc381 100644 --- a/src/core/components/name.js +++ b/src/core/components/name.js @@ -1,15 +1,14 @@ 'use strict' const debug = require('debug') -const promisify = require('promisify-es6') -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') +const callbackify = require('callbackify') const human = require('human-to-milliseconds') const crypto = require('libp2p-crypto') const errcode = require('err-code') const mergeOptions = require('merge-options') const mh = require('multihashes') const isDomain = require('is-domain-name') +const promisify = require('promisify-es6') const log = debug('ipfs:name') log.error = debug('ipfs:name:error') @@ -18,36 +17,32 @@ const namePubsub = require('./name-pubsub') const utils = require('../utils') const path = require('../ipns/path') -const keyLookup = (ipfsNode, kname, callback) => { +const keyLookup = async (ipfsNode, kname) => { if (kname === 'self') { - return callback(null, ipfsNode._peerInfo.id.privKey) + return ipfsNode._peerInfo.id.privKey } - const pass = ipfsNode._options.pass + try { + const pass = ipfsNode._options.pass + const pem = await ipfsNode._keychain.exportKey(kname, pass) + const privateKey = await promisify(crypto.keys.import.bind(crypto.keys))(pem, pass) - waterfall([ - (cb) => ipfsNode._keychain.exportKey(kname, pass, cb), - (pem, cb) => crypto.keys.import(pem, pass, cb) - ], (err, privateKey) => { - if (err) { - log.error(err) - return callback(errcode(err, 'ERR_CANNOT_GET_KEY')) - } + return privateKey + } catch (err) { + log.error(err) - return callback(null, privateKey) - }) + throw errcode(err, 'ERR_CANNOT_GET_KEY') + } } -const appendRemainder = (cb, remainder) => { - return (err, result) => { - if (err) { - return cb(err) - } - if (remainder.length) { - return cb(null, result + '/' + remainder.join('/')) - } - return cb(null, result) +const appendRemainder = async (result, remainder) => { + result = await result + + if (remainder.length) { + return result + '/' + remainder.join('/') } + + return result } /** @@ -81,19 +76,15 @@ module.exports = function name (self) { * @param {function(Error)} [callback] * @returns {Promise|void} */ - publish: promisify((value, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + publish: callbackify.variadic(async (value, options) => { options = options || {} + const resolve = !(options.resolve === false) const lifetime = options.lifetime || '24h' const key = options.key || 'self' if (!self.isOnline()) { - return callback(errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR')) + throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') } // TODO: params related logic should be in the core implementation @@ -103,7 +94,8 @@ module.exports = function name (self) { value = utils.normalizePath(value) } catch (err) { log.error(err) - return callback(err) + + throw err } let pubLifetime @@ -114,24 +106,19 @@ module.exports = function name (self) { pubLifetime = 
pubLifetime.toFixed(6) } catch (err) { log.error(err) - return callback(err) + + throw err } // TODO: ttl human for cache - - parallel([ - (cb) => keyLookup(self, key, cb), + const results = await Promise.all([ // verify if the path exists, if not, an error will stop the execution - (cb) => resolve.toString() === 'true' ? path.resolvePath(self, value, cb) : cb() - ], (err, results) => { - if (err) { - log.error(err) - return callback(err) - } + keyLookup(self, key), + resolve.toString() === 'true' ? path.resolvePath(self, value) : Promise.resolve() + ]) - // Start publishing process - self._ipns.publish(results[0], value, pubLifetime, callback) - }) + // Start publishing process + return self._ipns.publish(results[0], value, pubLifetime) }), /** @@ -144,22 +131,17 @@ module.exports = function name (self) { * @param {function(Error)} [callback] * @returns {Promise|void} */ - resolve: promisify((name, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - + resolve: callbackify.variadic(async (name, options) => { // eslint-disable-line require-await options = mergeOptions({ nocache: false, recursive: true - }, options) + }, options || {}) const offline = self._options.offline // TODO: params related logic should be in the core implementation if (offline && options.nocache) { - return callback(errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE')) + throw errcode(new Error('cannot specify both offline and nocache'), 'ERR_NOCACHE_AND_OFFLINE') } // Set node id as name for being resolved, if it is not received @@ -177,20 +159,20 @@ module.exports = function name (self) { } catch (err) { // lets check if we have a domain ex. /ipns/ipfs.io and resolve with dns if (isDomain(hash)) { - return self.dns(hash, options, appendRemainder(callback, remainder)) + return appendRemainder(self.dns(hash, options), remainder) } log.error(err) - return callback(errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME')) + throw errcode(new Error('Invalid IPNS name'), 'ERR_IPNS_INVALID_NAME') } // multihash is valid lets resolve with IPNS // IPNS resolve needs a online daemon if (!self.isOnline() && !offline) { - return callback(errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR')) + throw errcode(new Error(utils.OFFLINE_ERROR), 'OFFLINE_ERROR') } - self._ipns.resolve(`/${namespace}/${hash}`, options, appendRemainder(callback, remainder)) + return appendRemainder(self._ipns.resolve(`/${namespace}/${hash}`, options), remainder) }), pubsub: namePubsub(self) } diff --git a/src/core/components/object.js b/src/core/components/object.js index aa0776efba..1f7e3f7cbe 100644 --- a/src/core/components/object.js +++ b/src/core/components/object.js @@ -1,9 +1,6 @@ 'use strict' -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') -const setImmediate = require('async/setImmediate') -const promisify = require('promisify-es6') +const callbackify = require('callbackify') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const DAGLink = dagPB.DAGLink @@ -29,18 +26,18 @@ function normalizeMultihash (multihash, enc) { } } -function parseBuffer (buf, encoding, callback) { +function parseBuffer (buf, encoding) { switch (encoding) { case 'json': - return parseJSONBuffer(buf, callback) + return parseJSONBuffer(buf) case 'protobuf': - return parseProtoBuffer(buf, callback) + return parseProtoBuffer(buf) default: - callback(new Error(`unkown encoding: ${encoding}`)) + throw new Error(`unkown 
encoding: ${encoding}`) } } -function parseJSONBuffer (buf, callback) { +function parseJSONBuffer (buf) { let data let links @@ -56,24 +53,14 @@ function parseJSONBuffer (buf, callback) { }) data = Buffer.from(parsed.Data) } catch (err) { - return callback(new Error('failed to parse JSON: ' + err)) + throw new Error('failed to parse JSON: ' + err) } - try { - callback(null, new DAGNode(data, links)) - } catch (err) { - callback(err) - } + return new DAGNode(data, links) } -function parseProtoBuffer (buf, callback) { - let obj - try { - obj = dagPB.util.deserialize(buf) - } catch (err) { - return callback(err) - } - callback(null, obj) +function parseProtoBuffer (buf) { + return dagPB.util.deserialize(buf) } function findLinks (node, links = []) { @@ -108,100 +95,61 @@ function findLinks (node, links = []) { } module.exports = function object (self) { - function editAndSave (edit) { - return (multihash, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } + async function editAndSave (multihash, edit, options) { + options = options || {} - options = options || {} + const node = await self.object.get(multihash, options) - waterfall([ - (cb) => { - self.object.get(multihash, options, cb) - }, - (node, cb) => { - // edit applies the edit func passed to - // editAndSave - edit(node, (err, node) => { - if (err) { - return cb(err) - } - - self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }).then( - (cid) => { - if (options.preload !== false) { - self._preload(cid) - } - - cb(null, cid) - }, - (error) => cb(error) - ) - }) - } - ], callback) + // edit applies the edit func passed to + // editAndSave + const cid = await self._ipld.put(edit(node), multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + self._preload(cid) } + + return cid } return { - new: promisify((template, options, callback) => { - if (typeof template === 'function') { - callback = template - template = undefined - options = {} - } + new: callbackify.variadic(async (template, options) => { + options = options || {} - if (typeof options === 'function') { - callback = options - options = {} + // allow options in the template position + if (template && typeof template !== 'string') { + options = template + template = null } - options = options || {} - let data if (template) { - if (template !== 'unixfs-dir') { - return setImmediate(() => callback(new Error('unknown template'))) + if (template === 'unixfs-dir') { + data = (new Unixfs('directory')).marshal() + } else { + throw new Error('unknown template') } - data = (new Unixfs('directory')).marshal() } else { data = Buffer.alloc(0) } - let node - - try { - node = new DAGNode(data) - } catch (err) { - return callback(err) - } + const node = new DAGNode(data) - self._ipld.put(node, multicodec.DAG_PB, { + const cid = await self._ipld.put(node, multicodec.DAG_PB, { cidVersion: 0, hashAlg: multicodec.SHA2_256 - }).then( - (cid) => { - if (options.preload !== false) { - self._preload(cid) - } - - callback(null, cid) - }, - (error) => callback(error) - ) - }), - put: promisify((obj, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} + }) + + if (options.preload !== false) { + self._preload(cid) } + return cid + }), + put: callbackify.variadic(async (obj, options) => { options = options || {} const encoding = options.enc @@ -209,63 +157,38 @@ module.exports = function object (self) { if 
(Buffer.isBuffer(obj)) { if (encoding) { - parseBuffer(obj, encoding, (err, _node) => { - if (err) { - return callback(err) - } - node = _node - next() - }) + node = await parseBuffer(obj, encoding) } else { - try { - node = new DAGNode(obj) - } catch (err) { - return callback(err) - } - - next() + node = new DAGNode(obj) } } else if (DAGNode.isDAGNode(obj)) { // already a dag node node = obj - next() } else if (typeof obj === 'object') { - try { - node = new DAGNode(obj.Data, obj.Links) - } catch (err) { - return callback(err) - } - - next() + node = new DAGNode(obj.Data, obj.Links) } else { - return callback(new Error('obj not recognized')) + throw new Error('obj not recognized') } - function next () { - self._gcLock.readLock((cb) => { - self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }).then( - (cid) => { - if (options.preload !== false) { - self._preload(cid) - } - - cb(null, cid) - }, - cb - ) - }, callback) - } - }), + const release = await self._gcLock.readLock() + + try { + const cid = await self._ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + self._preload(cid) + } - get: promisify((multihash, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} + return cid + } finally { + release() } + }), + get: callbackify.variadic(async (multihash, options) => { // eslint-disable-line require-await options = options || {} let mh, cid @@ -273,13 +196,13 @@ module.exports = function object (self) { try { mh = normalizeMultihash(multihash, options.enc) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_MULTIHASH'))) + throw errCode(err, 'ERR_INVALID_MULTIHASH') } try { cid = new CID(mh) } catch (err) { - return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + throw errCode(err, 'ERR_INVALID_CID') } if (options.cidVersion === 1) { @@ -290,149 +213,90 @@ module.exports = function object (self) { self._preload(cid) } - self._ipld.get(cid).then( - (node) => callback(null, node), - (error) => callback(error) - ) + return self._ipld.get(cid) }), - data: promisify((multihash, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } + data: callbackify.variadic(async (multihash, options) => { + options = options || {} - self.object.get(multihash, options, (err, node) => { - if (err) { - return callback(err) - } + const node = await self.object.get(multihash, options) - callback(null, node.Data) - }) + return node.Data }), - links: promisify((multihash, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } + links: callbackify.variadic(async (multihash, options) => { + options = options || {} const cid = new CID(multihash) + const result = await self.dag.get(cid, options) - self.dag.get(cid, options, (err, result) => { - if (err) { - return callback(err) - } + if (cid.codec === 'raw') { + return [] + } - if (cid.codec === 'raw') { - return callback(null, []) - } + if (cid.codec === 'dag-pb') { + return result.value.Links + } - if (cid.codec === 'dag-pb') { - return callback(null, result.value.Links) - } + if (cid.codec === 'dag-cbor') { + return findLinks(result) + } - if (cid.codec === 'dag-cbor') { - const links = findLinks(result) + throw new Error(`Cannot resolve links from codec ${cid.codec}`) + }), - return callback(null, links) - } + stat: callbackify.variadic(async (multihash, options) => { + 
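// stat serializes the node to compute its sizes: BlockSize is the length of the
// serialized dag-pb block, DataSize/LinksSize split that length between payload
// and link metadata, and CumulativeSize adds the Tsize recorded on each link.
// A worked example under those formulas: a node whose 10-byte Data serializes
// to a 64-byte block with two links of Tsize 100 and 200 yields BlockSize=64,
// DataSize=10, LinksSize=54 and CumulativeSize=64+300=364.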
       options = options || {}
 
-        callback(new Error(`Cannot resolve links from codec ${cid.codec}`))
+      const node = await self.object.get(multihash, options)
+      const serialized = dagPB.util.serialize(node)
+      const cid = await dagPB.util.cid(serialized, {
+        cidVersion: 0
       })
-    }),
 
-    stat: promisify((multihash, options, callback) => {
-      if (typeof options === 'function') {
-        callback = options
-        options = {}
+      const blockSize = serialized.length
+      const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0)
+
+      return {
+        Hash: cid.toBaseEncodedString(),
+        NumLinks: node.Links.length,
+        BlockSize: blockSize,
+        LinksSize: blockSize - node.Data.length,
+        DataSize: node.Data.length,
+        CumulativeSize: blockSize + linkLength
       }
+    }),
 
-      options = options || {}
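+    // The patch helpers below all follow the same shape: load the node,
+    // apply one mutation via editAndSave's transform and resolve to the
+    // CID of the persisted result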
+    patch: {
+      addLink: callbackify.variadic(async (multihash, link, options) => { // eslint-disable-line require-await
+        return editAndSave(multihash, (node) => {
+          node.addLink(link)
 
-      waterfall([
-        (cb) => self.object.get(multihash, options, cb),
-        (node, cb) => {
-          cb(null, {
-            node,
-            serialized: dagPB.util.serialize(node)
-          })
-        },
-        ({ node, serialized }, cb) => {
-          parallel({
-            serialized: (next) => next(null, serialized),
-            cid: (next) => dagPB.util.cid(serialized, {
-              cidVersion: 0
-            }).then((cid) => next(null, cid), next),
-            node: (next) => next(null, node)
-          }, cb)
-        }
-      ], (err, result) => {
-        if (err) {
-          return callback(err)
-        }
+          return node
+        }, options)
+      }),
 
-        const blockSize = result.serialized.length
-        const linkLength = result.node.Links.reduce((a, l) => a + l.Tsize, 0)
+      rmLink: callbackify.variadic(async (multihash, linkRef, options) => { // eslint-disable-line require-await
+        return editAndSave(multihash, (node) => {
+          node.rmLink(linkRef.Name || linkRef.name)
 
-        callback(null, {
-          Hash: result.cid.toBaseEncodedString(),
-          NumLinks: result.node.Links.length,
-          BlockSize: blockSize,
-          LinksSize: blockSize - result.node.Data.length,
-          DataSize: result.node.Data.length,
-          CumulativeSize: blockSize + linkLength
-        })
-      })
-    }),
+          return node
+        }, options)
+      }),
 
-    patch: promisify({
-      addLink (multihash, link, options, callback) {
-        editAndSave((node, cb) => {
-          node.addLink(link)
-          cb(null, node)
-        })(multihash, options, callback)
-      },
-
-      rmLink (multihash, linkRef, options, callback) {
-        editAndSave((node, cb) => {
-          linkRef = linkRef.Name || linkRef.name
-
-          try {
-            node.rmLink(linkRef)
-          } catch (err) {
-            return cb(err)
-          }
-
-          cb(null, node)
-        })(multihash, options, callback)
-      },
-
-      appendData (multihash, data, options, callback) {
-        editAndSave((node, cb) => {
+      appendData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await
+        return editAndSave(multihash, (node) => {
           const newData = Buffer.concat([node.Data, data])
 
-          try {
-            node = new DAGNode(newData, node.Links)
-          } catch (err) {
-            return cb(err)
-          }
-
-          cb(null, node)
-        })(multihash, options, callback)
-      },
-
-      setData (multihash, data, options, callback) {
-        editAndSave((node, cb) => {
-          try {
-            node = new DAGNode(data, node.Links)
-          } catch (err) {
-            return cb(err)
-          }
-
-          cb(null, node)
-        })(multihash, options, callback)
-      }
-    })
+          return new DAGNode(newData, node.Links)
+        }, options)
+      }),
+
+      setData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await
+        return editAndSave(multihash, (node) => {
+          return new DAGNode(data, node.Links)
+        }, options)
+      })
+    }
   }
 }
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
index ca62581f00..cbe0c8a250 100644
--- a/src/core/components/pin.js
+++ b/src/core/components/pin.js
@@ -1,181 +1,159 @@
 /* eslint max-nested-callbacks: ["error", 8] */
 'use strict'
 
-const promisify = require('promisify-es6')
 const callbackify = require('callbackify')
-const CID = require('cids')
-const map = require('async/map')
-const mapSeries = require('async/mapSeries')
-const waterfall = require('async/waterfall')
-const setImmediate = require('async/setImmediate')
 const errCode = require('err-code')
 const multibase = require('multibase')
-
 const { resolvePath } = require('../utils')
 const PinManager = require('./pin/pin-manager')
 const PinTypes = PinManager.PinTypes
 
-function toB58String (hash) {
-  return new CID(hash).toBaseEncodedString()
-}
-
 module.exports = (self) => {
   const dag = self.dag
   const pinManager = new PinManager(self._repo, dag)
 
   const pin = {
-    add: callbackify(async (paths, options) => {
+    add: callbackify.variadic(async (paths, options) => {
       options = options || {}
 
-      const recursive = options.recursive == null ? true : options.recursive
+      const recursive = options.recursive !== false
+      const cids = await resolvePath(self.object, paths)
+      const pinAdd = async () => {
+        const results = []
 
-      const multihashes = await resolvePath(self.object, paths)
-      const pinAdd = (pinComplete) => {
         // verify that each hash can be pinned
-        map(multihashes, (multihash, cb) => {
-          const cid = new CID(multihash)
+        for (const cid of cids) {
           const key = cid.toBaseEncodedString()
 
           if (recursive) {
             if (pinManager.recursivePins.has(key)) {
               // it's already pinned recursively
-              return cb(null, key)
+              results.push(key)
+
+              continue
             }
 
             // entire graph of nested links should be pinned,
             // so make sure we have all the objects
-            pinManager.fetchCompleteDag(key, { preload: options.preload }, (err) => {
-              if (err) { return cb(err) }
-              // found all objects, we can add the pin
-              return cb(null, key)
-            })
+            await pinManager.fetchCompleteDag(key, { preload: options.preload })
+
+            // found all objects, we can add the pin
+            results.push(key)
           } else {
             if (pinManager.recursivePins.has(key)) {
               // recursive supersedes direct, can't have both
-              return cb(new Error(`${key} already pinned recursively`))
+              throw new Error(`${key} already pinned recursively`)
             }
-            if (pinManager.directPins.has(key)) {
-              // already directly pinned
-              return cb(null, key)
+
+            if (!pinManager.directPins.has(key)) {
+              // make sure we have the object
+              await dag.get(cid, { preload: options.preload })
            }
-            // make sure we have the object
-            dag.get(cid, { preload: options.preload }, (err) => {
-              if (err) { return cb(err) }
-              // found the object, we can add the pin
-              return cb(null, key)
-            })
+
+            results.push(key)
           }
-        }, (err, results) => {
-          if (err) { return pinComplete(err) }
-
-          // update the pin sets in memory
-          const pinset = recursive ? pinManager.recursivePins : pinManager.directPins
-          results.forEach(key => pinset.add(key))
-
-          // persist updated pin sets to datastore
-          pinManager.flushPins((err, root) => {
-            if (err) { return pinComplete(err) }
-            pinComplete(null, results.map(hash => ({ hash })))
-          })
-        })
+        }
+
+        // update the pin sets in memory
+        const pinset = recursive ? pinManager.recursivePins : pinManager.directPins
+        results.forEach(key => pinset.add(key))
+
+        // persist updated pin sets to datastore
+        await pinManager.flushPins()
+
+        return results.map(hash => ({ hash }))
       }
 
       // When adding a file, we take a lock that gets released after pinning
       // is complete, so don't take a second lock here
-      const lock = options.lock !== false
+      const lock = Boolean(options.lock)
 
-      if (lock) {
-        self._gcLock.readLock(pinAdd, callback)
-      } else {
-        pinAdd(callback)
+      if (!lock) {
+        return pinAdd()
       }
-    }),
 
-    rm: promisify((paths, options, callback) => {
-      if (typeof options === 'function') {
-        callback = options
+      const release = await self._gcLock.readLock()
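+
+      // a GC read lock lets pin operations run concurrently with each
+      // other while blocking garbage collection until release() is called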
+      try {
+        return await pinAdd()
+      } finally {
+        release()
       }
+    }),
 
+    rm: callbackify.variadic(async (paths, options) => {
       options = options || {}
 
       const recursive = options.recursive == null ? true : options.recursive
 
       if (options.cidBase && !multibase.names.includes(options.cidBase)) {
-        return setImmediate(() => {
-          callback(errCode(new Error('invalid multibase'), 'ERR_INVALID_MULTIBASE'))
-        })
+        throw errCode(new Error('invalid multibase'), 'ERR_INVALID_MULTIBASE')
       }
 
-      resolvePath(self.object, paths, (err, mhs) => {
-        if (err) { return callback(err) }
-
-        self._gcLock.readLock((lockCb) => {
-          // verify that each hash can be unpinned
-          map(mhs, (multihash, cb) => {
-            pinManager.isPinnedWithType(multihash, PinTypes.all, (err, res) => {
-              if (err) { return cb(err) }
-              const { pinned, reason } = res
-              const key = toB58String(multihash)
-              if (!pinned) {
-                return cb(new Error(`${key} is not pinned`))
-              }
+      const cids = await resolvePath(self.object, paths)
+      const release = await self._gcLock.readLock()
+      const results = []
 
-              switch (reason) {
-                case (PinTypes.recursive):
-                  if (recursive) {
-                    return cb(null, key)
-                  } else {
-                    return cb(new Error(`${key} is pinned recursively`))
-                  }
-                case (PinTypes.direct):
-                  return cb(null, key)
-                default:
-                  return cb(new Error(
-                    `${key} is pinned indirectly under ${reason}`
-                  ))
-              }
-            })
-          }, (err, results) => {
-            if (err) { return lockCb(err) }
-
-            // update the pin sets in memory
-            results.forEach(key => {
-              if (recursive && pinManager.recursivePins.has(key)) {
-                pinManager.recursivePins.delete(key)
-              } else {
-                pinManager.directPins.delete(key)
+      try {
+        // verify that each hash can be unpinned
+        for (const cid of cids) {
+          const res = await pinManager.isPinnedWithType(cid, PinTypes.all)
+
+          const { pinned, reason } = res
+          const key = cid.toBaseEncodedString()
+
+          if (!pinned) {
+            throw new Error(`${key} is not pinned`)
+          }
+
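+          // indirect pins cannot be removed directly; the recursive pin
+          // they hang off has to be removed instead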
+          switch (reason) {
+            case (PinTypes.recursive):
+              if (!recursive) {
+                throw new Error(`${key} is pinned recursively`)
               }
-            })
-
-            // persist updated pin sets to datastore
-            pinManager.flushPins((err, root) => {
-              if (err) { return lockCb(err) }
-              self.log(`Removed pins: ${results}`)
-              lockCb(null, results.map(hash => ({ hash })))
-            })
-          })
-        }, callback)
-      })
+
+              results.push(key)
+
+              break
+            case (PinTypes.direct):
+              results.push(key)
+
+              break
+            default:
+              throw new Error(`${key} is pinned indirectly under ${reason}`)
+          }
+        }
+
+        // update the pin sets in memory
+        results.forEach(key => {
+          if (recursive && pinManager.recursivePins.has(key)) {
+            pinManager.recursivePins.delete(key)
+          } else {
+            pinManager.directPins.delete(key)
+          }
+        })
+
+        // persist updated pin sets to datastore
+        await pinManager.flushPins()
+
+        self.log(`Removed pins: ${results}`)
+
+        return results.map(hash => ({ hash }))
+      } finally {
+        release()
+      }
     }),
 
-    ls: promisify((paths, options, callback) => {
+    ls: callbackify.variadic(async (paths, options) => {
+      options = options || {}
+
       let type = PinTypes.all
-      if (typeof paths === 'function') {
-        callback = paths
-        options = {}
-        paths = null
-      }
-      if (typeof options === 'function') {
-        callback = options
-      }
+
       if (paths && paths.type) {
         options = paths
         paths = null
       }
 
-      options = options || {}
-
       if (options.type) {
         type = options.type
         if (typeof options.type === 'string') {
@@ -183,88 +161,87 @@ module.exports = (self) => {
         }
         const err = PinManager.checkPinType(type)
         if (err) {
-          return setImmediate(() => callback(err))
+          throw err
         }
       }
 
       if (paths) {
         // check the pinned state of specific hashes
-        waterfall([
-          (cb) => resolvePath(self.object, paths, cb),
-          (hashes, cb) => mapSeries(hashes, (hash, done) => pinManager.isPinnedWithType(hash, type, done), cb),
-          (results, cb) => {
-            results = results
-              .filter(result => result.pinned)
-              .map(({ key, reason }) => {
-                switch (reason) {
-                  case PinTypes.direct:
-                  case PinTypes.recursive:
-                    return {
-                      hash: key,
-                      type: reason
-                    }
-                  default:
-                    return {
-                      hash: key,
-                      type: `${PinTypes.indirect} through ${reason}`
-                    }
-                }
-              })
-
-            if (!results.length) {
-              return cb(new Error(`path '${paths}' is not pinned`))
+        const cids = await resolvePath(self.object, paths)
+        const results = []
+
+        for (const cid of cids) {
+          const { key, reason, pinned } = await pinManager.isPinnedWithType(cid, type)
+
+          if (pinned) {
+            switch (reason) {
+              case PinTypes.direct:
+              case PinTypes.recursive:
+                results.push({
+                  hash: key,
+                  type: reason
+                })
+                break
+              default:
+                results.push({
+                  hash: key,
+                  type: `${PinTypes.indirect} through ${reason}`
+                })
             }
-
-            cb(null, results)
           }
-        ], (err, results) => err ? callback(err) : callback(null, results)) // we don't want results equal [undefined] when err is present
-      } else {
-        // show all pinned items of type
-        let pins = []
-
-        if (type === PinTypes.direct || type === PinTypes.all) {
-          pins = pins.concat(
-            Array.from(pinManager.directPins).map(hash => ({
-              type: PinTypes.direct,
-              hash
-            }))
-          )
         }
 
-        if (type === PinTypes.recursive || type === PinTypes.all) {
-          pins = pins.concat(
-            Array.from(pinManager.recursivePins).map(hash => ({
-              type: PinTypes.recursive,
-              hash
-            }))
-          )
+        if (!results.length) {
+          throw new Error(`path '${paths}' is not pinned`)
         }
 
-        if (type === PinTypes.indirect || type === PinTypes.all) {
-          pinManager.getIndirectKeys(options, (err, indirects) => {
-            if (err) { return callback(err) }
-            pins = pins
-              // if something is pinned both directly and indirectly,
-              // report the indirect entry
-              .filter(({ hash }) =>
-                !indirects.includes(hash) ||
-                (indirects.includes(hash) && !pinManager.directPins.has(hash))
-              )
-              .concat(indirects.map(hash => ({
-                type: PinTypes.indirect,
-                hash
-              })))
-            return callback(null, pins)
-          })
-        } else {
-          callback(null, pins)
-        }
+        return results
+      }
+
+      // show all pinned items of type
+      let pins = []
+
+      if (type === PinTypes.direct || type === PinTypes.all) {
+        pins = pins.concat(
+          Array.from(pinManager.directPins).map(hash => ({
+            type: PinTypes.direct,
+            hash
+          }))
+        )
+      }
+
+      if (type === PinTypes.recursive || type === PinTypes.all) {
+        pins = pins.concat(
+          Array.from(pinManager.recursivePins).map(hash => ({
+            type: PinTypes.recursive,
+            hash
+          }))
+        )
       }
+
+      if (type === PinTypes.indirect || type === PinTypes.all) {
+        const indirects = await pinManager.getIndirectKeys(options)
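+        // this walks every recursively pinned DAG, so it can be slow on
+        // repos with many or very large recursive pins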
+
+        pins = pins
+          // if something is pinned both directly and indirectly,
+          // report the indirect entry
+          .filter(({ hash }) =>
+            !indirects.includes(hash) ||
+            (indirects.includes(hash) && !pinManager.directPins.has(hash))
+          )
+          .concat(indirects.map(hash => ({
+            type: PinTypes.indirect,
+            hash
+          })))
+
+        return pins
+      }
+
+      return pins
     }),
 
-    _isPinnedWithType: promisify(pinManager.isPinnedWithType.bind(pinManager)),
-    _getInternalBlocks: promisify(pinManager.getInternalBlocks.bind(pinManager)),
-    _load: promisify(pinManager.load.bind(pinManager))
+    // used by tests
+    pinManager
   }
 
   return pin
diff --git a/src/core/components/pin/gc-lock.js b/src/core/components/pin/gc-lock.js
index 59cb18b497..faceea12cf 100644
--- a/src/core/components/pin/gc-lock.js
+++ b/src/core/components/pin/gc-lock.js
@@ -7,16 +7,18 @@ const Mutex = require('../../../utils/mutex')
 const log = require('debug')('ipfs:gc:lock')
 
 class GCLock {
-  constructor (repoOwner, options = {}) {
+  constructor (repoOwner, options) {
+    options = options || {}
+
     this.mutex = new Mutex(repoOwner, { ...options, log })
   }
 
-  readLock (lockedFn, cb) {
-    return this.mutex.readLock(lockedFn, cb)
+  readLock () {
+    return this.mutex.readLock()
   }
 
-  writeLock (lockedFn, cb) {
-    return this.mutex.writeLock(lockedFn, cb)
+  writeLock () {
+    return this.mutex.writeLock()
   }
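+
+  // Both methods now resolve to a release function instead of wrapping a
+  // callback, e.g.:
+  //
+  //   const release = await gcLock.writeLock()
+  //   try {
+  //     // ...exclusive repo access...
+  //   } finally {
+  //     release()
+  //   }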
 
   pullReadLock (lockedPullFn) {
diff --git a/src/core/components/pin/gc.js b/src/core/components/pin/gc.js
index 5615d3ea3d..11dbe850d3 100644
--- a/src/core/components/pin/gc.js
+++ b/src/core/components/pin/gc.js
@@ -1,13 +1,11 @@
 'use strict'
 
-const promisify = require('promisify-es6')
 const CID = require('cids')
 const base32 = require('base32.js')
-const parallel = require('async/parallel')
-const mapLimit = require('async/mapLimit')
-const expErr = require('explain-error')
+const callbackify = require('callbackify')
 const { cidToString } = require('../../../utils/cid')
 const log = require('debug')('ipfs:gc')
+const { default: Queue } = require('p-queue')
 // TODO: Use exported key from root when upgraded to ipfs-mfs@>=13
 // https://github.com/ipfs/js-ipfs-mfs/pull/58
 const { MFS_ROOT_KEY } = require('ipfs-mfs/src/core/utils/constants')
@@ -17,136 +15,129 @@ const BLOCK_RM_CONCURRENCY = 256
 
 // Perform mark and sweep garbage collection
 module.exports = function gc (self) {
-  return promisify((callback) => {
+  return callbackify(async () => {
     const start = Date.now()
     log('Creating set of marked blocks')
 
-    self._gcLock.writeLock((lockCb) => {
-      parallel([
+    const release = await self._gcLock.writeLock()
+
+    try {
+      const [
+        blockKeys, markedSet
+      ] = await Promise.all([
         // Get all blocks keys from the blockstore
-        (cb) => self._repo.blocks.query({ keysOnly: true }, cb),
+        self._repo.blocks.query({ keysOnly: true }),
+
         // Mark all blocks that are being used
-        (cb) => createMarkedSet(self, cb)
-      ], (err, [blockKeys, markedSet]) => {
-        if (err) {
-          log('GC failed to fetch all block keys and created marked set', err)
-          return lockCb(err)
-        }
+        createMarkedSet(self)
+      ])
 
-        // Delete blocks that are not being used
-        deleteUnmarkedBlocks(self, markedSet, blockKeys, (err, res) => {
-          log(`Complete (${Date.now() - start}ms)`)
+      // Delete blocks that are not being used
+      const res = await deleteUnmarkedBlocks(self, markedSet, blockKeys)
 
-          if (err) {
-            log('GC failed to delete unmarked blocks', err)
-            return lockCb(err)
-          }
+      log(`Complete (${Date.now() - start}ms)`)
 
-          lockCb(null, res)
-        })
-      })
-    }, callback)
+      return res
+    } finally {
+      release()
+    }
   })
 }
 
 // Get Set of CIDs of blocks to keep
-function createMarkedSet (ipfs, callback) {
-  parallel([
+async function createMarkedSet (ipfs) {
+  const output = new Set()
+
+  const addPins = pins => {
+    log(`Found ${pins.length} pinned blocks`)
+
+    pins.forEach(pin => {
+      output.add(cidToString(new CID(pin), { base: 'base32' }))
+    })
+  }
+
+  await Promise.all([
     // All pins, direct and indirect
-    (cb) => ipfs.pin.ls((err, pins) => {
-      if (err) {
-        return cb(expErr(err, 'Could not list pinned blocks'))
-      }
-      log(`Found ${pins.length} pinned blocks`)
-      const cids = pins.map(p => new CID(p.hash))
-      // log('  ' + cids.join('\n  '))
-      cb(null, cids)
-    }),
+    ipfs.pin.ls()
+      .then(pins => pins.map(pin => pin.hash))
+      .then(addPins),
 
     // Blocks used internally by the pinner
-    (cb) => ipfs.pin._getInternalBlocks((err, cids) => {
-      if (err) {
-        return cb(expErr(err, 'Could not list pinner internal blocks'))
-      }
-      log(`Found ${cids.length} pinner internal blocks`)
-      // log('  ' + cids.join('\n  '))
-      cb(null, cids)
-    }),
+    ipfs.pin.pinManager.getInternalBlocks()
+      .then(addPins),
 
     // The MFS root and all its descendants
-    (cb) => ipfs._repo.root.get(MFS_ROOT_KEY, (err, mh) => {
-      if (err) {
+    ipfs._repo.root.get(MFS_ROOT_KEY)
+      .then(mh => getDescendants(ipfs, new CID(mh)))
+      .then(addPins)
+      .catch(err => {
         if (err.code === 'ERR_NOT_FOUND') {
          log('No blocks in MFS')
-          return cb(null, [])
+          return []
         }
 
-        return cb(expErr(err, 'Could not get MFS root from datastore'))
-      }
-
-      getDescendants(ipfs, new CID(mh), cb)
-    })
-  ], (err, res) => {
-    if (err) {
-      return callback(err)
-    }
+        throw err
+      })
+  ])
 
-    const cids = [].concat(...res).map(cid => cidToString(cid, { base: 'base32' }))
-    return callback(null, new Set(cids))
-  })
+  return output
 }
 
 // Recursively get descendants of the given CID
-function getDescendants (ipfs, cid, callback) {
-  ipfs.refs(cid, { recursive: true }, (err, refs) => {
-    if (err) {
-      return callback(expErr(err, 'Could not get MFS root descendants from store'))
-    }
+async function getDescendants (ipfs, cid) {
+  const refs = await ipfs.refs(cid, { recursive: true })
+  const cids = [cid, ...refs.map(r => new CID(r.ref))]
+  log(`Found ${cids.length} MFS blocks`)
+  // log('  ' + cids.join('\n  '))
 
-    const cids = [cid, ...refs.map(r => new CID(r.ref))]
-    log(`Found ${cids.length} MFS blocks`)
-    // log('  ' + cids.join('\n  '))
-    callback(null, cids)
-  })
+  return cids
 }
 
 // Delete all blocks that are not marked as in use
-function deleteUnmarkedBlocks (ipfs, markedSet, blockKeys, callback) {
+async function deleteUnmarkedBlocks (ipfs, markedSet, blockKeys) {
   // Iterate through all blocks and find those that are not in the marked set
   // The blockKeys variable has the form [ { key: Key() }, { key: Key() }, ... ]
   const unreferenced = []
-  const res = []
-  let errCount = 0
-
-  for (const { key: k } of blockKeys) {
+  const result = []
+
+  const queue = new Queue({
+    concurrency: BLOCK_RM_CONCURRENCY
+  })
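+
+  // deletes are queued so that at most BLOCK_RM_CONCURRENCY blockstore
+  // operations are in flight at once; each result entry records the CID
+  // and any error hit while deleting it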
+
+  for await (const { key: k } of blockKeys) {
     try {
       const cid = dsKeyToCid(k)
       const b32 = cid.toV1().toString('base32')
       if (!markedSet.has(b32)) {
         unreferenced.push(cid)
+
+        queue.add(async () => {
+          const res = {
+            cid
+          }
+
+          try {
+            await ipfs._repo.blocks.delete(cid)
+          } catch (err) {
+            res.err = new Error(`Could not delete block with CID ${cid}: ${err.message}`)
+          }
+
+          result.push(res)
+        })
       }
     } catch (err) {
-      errCount++
       const msg = `Could not convert block with key '${k}' to CID`
       log(msg, err)
-      res.push({ err: new Error(msg + `: ${err.message}`) })
+      result.push({ err: new Error(msg + `: ${err.message}`) })
     }
   }
 
-  const msg = `Marked set has ${markedSet.size} unique blocks. Blockstore has ${blockKeys.length} blocks. ` +
-    `Deleting ${unreferenced.length} blocks.` + (errCount ? ` (${errCount} errors)` : '')
-  log(msg)
-  // log('  ' + unreferenced.join('\n  '))
-
-  mapLimit(unreferenced, BLOCK_RM_CONCURRENCY, (cid, cb) => {
-    // Delete blocks from blockstore
-    ipfs._repo.blocks.delete(cid, (err) => {
-      const res = {
-        cid,
-        err: err && new Error(`Could not delete block with CID ${cid}: ${err.message}`)
-      }
-      cb(null, res)
-    })
-  }, (_, delRes) => {
-    callback(null, res.concat(delRes))
-  })
+  await queue.onIdle()
+
+  log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blockKeys.length} blocks. ` +
+    `Deleted ${unreferenced.length} blocks.`)
+
+  return result
 }
 
 // TODO: Use exported utility when upgrade to ipfs-repo@>=0.27.1
diff --git a/src/core/components/pin/pin-manager.js b/src/core/components/pin/pin-manager.js
index d76902eac3..703bc4ac26 100644
--- a/src/core/components/pin/pin-manager.js
+++ b/src/core/components/pin/pin-manager.js
@@ -3,22 +3,19 @@
 
 const { DAGNode, DAGLink } = require('ipld-dag-pb')
 const CID = require('cids')
-const series = require('async/series')
-const parallel = require('async/parallel')
-const eachLimit = require('async/eachLimit')
-const waterfall = require('async/waterfall')
-const detectLimit = require('async/detectLimit')
-const queue = require('async/queue')
+const { default: Queue } = require('p-queue')
 const { Key } = require('interface-datastore')
 const errCode = require('err-code')
 const multicodec = require('multicodec')
+const dagCborLinks = require('dag-cbor-links')
 const debug = require('debug')
 const { cidToString } = require('../../../utils/cid')
 const createPinSet = require('./pin-set')
 
 // arbitrary limit to the number of concurrent dag operations
-const concurrencyLimit = 300
+const WALK_DAG_CONCURRENCY_LIMIT = 300
+const IS_PINNED_WITH_TYPE_CONCURRENCY_LIMIT = 300
 const PIN_DS_KEY = new Key('/local/pins')
 
 function invalidPinTypeErr (type) {
@@ -43,32 +40,35 @@ class PinManager {
     this.recursivePins = new Set()
   }
 
-  _walkDag ({ cid, preload = false, onCid = () => {} }, cb) {
-    const q = queue(({ cid }, done) => {
-      this.dag.get(cid, { preload }, (err, result) => {
-        if (err) {
-          return done(err)
-        }
+  async _walkDag ({ cid, preload = false, onCid = () => {} }) {
+    if (!CID.isCID(cid)) {
+      cid = new CID(cid)
+    }
+
+    const walk = (cid) => {
+      return async () => {
+        const { value: node } = await this.dag.get(cid, { preload })
 
         onCid(cid)
 
-        if (result.value.Links) {
-          q.push(result.value.Links.map(link => ({
-            cid: link.Hash
-          })))
+        if (cid.codec === 'dag-pb') {
+          queue.addAll(
+            node.Links.map(link => walk(link.Hash))
+          )
+        } else if (cid.codec === 'dag-cbor') {
+          for (const [_, childCid] of dagCborLinks(node)) { // eslint-disable-line no-unused-vars
+            queue.add(walk(childCid))
+          }
         }
-
-        done()
-      })
-    }, concurrencyLimit)
-    q.drain = () => {
-      cb()
-    }
-    q.error = (err) => {
-      q.kill()
-      cb(err)
+      }
     }
-
-    q.push({ cid })
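+
+    // every visited node enqueues its children (dag-pb Links or dag-cbor
+    // nested CIDs), so the traversal fans out until the queue drains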
+    const queue = new Queue({
+      concurrency: WALK_DAG_CONCURRENCY_LIMIT
+    })
+    queue.add(walk(cid))
+
+    await queue.onIdle()
   }
 
   directKeys () {
@@ -79,10 +79,11 @@ class PinManager {
     return Array.from(this.recursivePins, key => new CID(key).buffer)
   }
 
-  getIndirectKeys ({ preload }, callback) {
+  async getIndirectKeys ({ preload }) {
     const indirectKeys = new Set()
-    eachLimit(this.recursiveKeys(), concurrencyLimit, (multihash, cb) => {
-      this._walkDag({
+
+    for (const multihash of this.recursiveKeys()) {
+      await this._walkDag({
         cid: new CID(multihash),
         preload: preload || false,
         onCid: (cid) => {
@@ -93,220 +94,180 @@ class PinManager {
           indirectKeys.add(cid)
         }
       }
-      }, cb)
-    }, (err) => {
-      if (err) { return callback(err) }
-      callback(null, Array.from(indirectKeys))
-    })
+      })
+    }
+
+    return Array.from(indirectKeys)
   }
 
   // Encode and write pin key sets to the datastore:
   // a DAGLink for each of the recursive and direct pinsets
   // a DAGNode holding those as DAGLinks, a kind of root pin
-  flushPins (callback) {
-    let dLink, rLink, root
-    series([
+  async flushPins () {
+    const [
+      dLink,
+      rLink
+    ] = await Promise.all([
      // create a DAGLink to the node with direct pins
-      cb => waterfall([
-        cb => this.pinset.storeSet(this.directKeys(), cb),
-        ({ node, cid }, cb) => {
-          try {
-            cb(null, new DAGLink(PinTypes.direct, node.size, cid))
-          } catch (err) {
-            cb(err)
-          }
-        },
-        (link, cb) => { dLink = link; cb(null) }
-      ], cb),
-
+      this.pinset.storeSet(this.directKeys())
+        .then((result) => {
+          return new DAGLink(PinTypes.direct, result.node.size, result.cid)
+        }),
 
       // create a DAGLink to the node with recursive pins
-      cb => waterfall([
-        cb => this.pinset.storeSet(this.recursiveKeys(), cb),
-        ({ node, cid }, cb) => {
-          try {
-            cb(null, new DAGLink(PinTypes.recursive, node.size, cid))
-          } catch (err) {
-            cb(err)
-          }
-        },
-        (link, cb) => { rLink = link; cb(null) }
-      ], cb),
-
+      this.pinset.storeSet(this.recursiveKeys())
+        .then((result) => {
+          return new DAGLink(PinTypes.recursive, result.node.size, result.cid)
+        }),
 
       // the pin-set nodes link to a special 'empty' node, so make sure it exists
-      cb => {
-        let empty
-
-        try {
-          empty = new DAGNode(Buffer.alloc(0))
-        } catch (err) {
-          return cb(err)
-        }
+      this.dag.put(new DAGNode(Buffer.alloc(0)), {
+        version: 0,
+        format: multicodec.DAG_PB,
+        hashAlg: multicodec.SHA2_256,
+        preload: false
+      })
+    ])
+
+    // create a root node with DAGLinks to the direct and recursive DAGs
+    const rootNode = new DAGNode(Buffer.alloc(0), [dLink, rLink])
+    const rootCid = await this.dag.put(rootNode, {
+      version: 0,
+      format: multicodec.DAG_PB,
+      hashAlg: multicodec.SHA2_256,
+      preload: false
+    })
 
-        this.dag.put(empty, {
-          version: 0,
-          format: multicodec.DAG_PB,
-          hashAlg: multicodec.SHA2_256,
-          preload: false
-        }, cb)
-      },
-
-      // create a root node with DAGLinks to the direct and recursive DAGs
-      cb => {
-        let node
-
-        try {
-          node = new DAGNode(Buffer.alloc(0), [dLink, rLink])
-        } catch (err) {
-          return cb(err)
-        }
+    // save root to datastore under a consistent key
+    await this.repo.datastore.put(PIN_DS_KEY, rootCid.buffer)
 
-        root = node
-        this.dag.put(root, {
-          version: 0,
-          format: multicodec.DAG_PB,
-          hashAlg: multicodec.SHA2_256,
-          preload: false
-        }, (err, cid) => {
-          if (!err) {
-            root.multihash = cid.buffer
-          }
-          cb(err)
-        })
-      },
-
-      // save root to datastore under a consistent key
-      cb => this.repo.datastore.put(PIN_DS_KEY, root.multihash, cb)
-    ], (err, res) => {
-      if (err) { return callback(err) }
-      this.log(`Flushed pins with root: ${root}`)
-      return callback(null, root)
-    })
+    this.log(`Flushed pins with root: ${rootCid}`)
   }
 
-  load (callback) {
-    waterfall([
-      (cb) => this.repo.datastore.has(PIN_DS_KEY, cb),
-      (has, cb) => has ? cb() : cb(new Error('No pins to load')),
-      (cb) => this.repo.datastore.get(PIN_DS_KEY, cb),
-      (mh, cb) => {
-        this.dag.get(new CID(mh), '', { preload: false }, cb)
-      }
-    ], (err, pinRoot) => {
-      if (err) {
-        if (err.message === 'No pins to load') {
-          this.log('No pins to load')
-          return callback()
-        } else {
-          return callback(err)
-        }
-      }
+  async load () {
+    const has = await this.repo.datastore.has(PIN_DS_KEY)
+
+    if (!has) {
+      return
+    }
 
-      parallel([
-        cb => this.pinset.loadSet(pinRoot.value, PinTypes.recursive, cb),
-        cb => this.pinset.loadSet(pinRoot.value, PinTypes.direct, cb)
-      ], (err, keys) => {
-        if (err) { return callback(err) }
-        const [rKeys, dKeys] = keys
+    const mh = await this.repo.datastore.get(PIN_DS_KEY)
+    const pinRoot = await this.dag.get(new CID(mh), '', { preload: false })
 
-        this.directPins = new Set(dKeys.map(k => cidToString(k)))
-        this.recursivePins = new Set(rKeys.map(k => cidToString(k)))
+    const [
+      rKeys, dKeys
+    ] = await Promise.all([
+      this.pinset.loadSet(pinRoot.value, PinTypes.recursive),
+      this.pinset.loadSet(pinRoot.value, PinTypes.direct)
+    ])
 
-        this.log('Loaded pins from the datastore')
-        return callback(null)
-      })
-    })
+    this.directPins = new Set(dKeys.map(k => cidToString(k)))
+    this.recursivePins = new Set(rKeys.map(k => cidToString(k)))
+
+    this.log('Loaded pins from the datastore')
   }
 
-  isPinnedWithType (multihash, type, callback) {
+  async isPinnedWithType (multihash, type) {
     const key = cidToString(multihash)
     const { recursive, direct, all } = PinTypes
 
     // recursive
     if ((type === recursive || type === all) && this.recursivePins.has(key)) {
-      return callback(null, {
+      return {
         key,
         pinned: true,
         reason: recursive
-      })
+      }
     }
 
     if (type === recursive) {
-      return callback(null, {
+      return {
         key,
         pinned: false
-      })
+      }
     }
 
     // direct
     if ((type === direct || type === all) && this.directPins.has(key)) {
-      return callback(null, {
+      return {
         key,
         pinned: true,
         reason: direct
-      })
+      }
     }
 
     if (type === direct) {
-      return callback(null, {
+      return {
         key,
         pinned: false
-      })
+      }
     }
 
     // indirect (default)
     // check each recursive key to see if multihash is under it
     // arbitrary limit, enables handling 1000s of pins.
-    detectLimit(this.recursiveKeys().map(key => new CID(key)), concurrencyLimit, (cid, cb) => {
-      waterfall([
-        (done) => this.dag.get(cid, '', { preload: false }, done),
-        (result, done) => done(null, result.value),
-        (node, done) => this.pinset.hasDescendant(node, key, done)
-      ], cb)
-    }, (err, cid) => callback(err, {
+    const queue = new Queue({
+      concurrency: IS_PINNED_WITH_TYPE_CONCURRENCY_LIMIT
+    })
+    let cid
+
+    queue.addAll(
+      this.recursiveKeys()
+        .map(childKey => {
+          childKey = new CID(childKey)
+
+          return async () => {
+            const has = await this.pinset.hasDescendant(childKey, key)
+
+            if (has) {
+              cid = childKey
+              queue.clear()
+            }
+          }
+        })
+    )
+
+    await queue.onIdle()
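+
+    // clear() only drops tasks that have not started yet; tasks already
+    // running when the match was found still settle before onIdle() resolves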
+
+    return {
       key,
       pinned: Boolean(cid),
       reason: cid
-    }))
+    }
   }
 
   // Gets CIDs of blocks used internally by the pinner
-  getInternalBlocks (callback) {
-    this.repo.datastore.get(PIN_DS_KEY, (err, mh) => {
-      if (err) {
-        if (err.code === 'ERR_NOT_FOUND') {
-          this.log('No pinned blocks')
-          return callback(null, [])
-        }
-        return callback(new Error(`Could not get pin sets root from datastore: ${err.message}`))
+  async getInternalBlocks () {
+    let mh
+
+    try {
+      mh = await this.repo.datastore.get(PIN_DS_KEY)
+    } catch (err) {
+      if (err.code === 'ERR_NOT_FOUND') {
+        this.log('No pinned blocks')
+
+        return []
       }
 
-      const cid = new CID(mh)
-      this.dag.get(cid, '', { preload: false }, (err, obj) => {
-        if (err) {
-          return callback(new Error(`Could not get pin sets from store: ${err.message}`))
-        }
+      throw new Error(`Could not get pin sets root from datastore: ${err.message}`)
    }
 
+    const cid = new CID(mh)
+    const obj = await this.dag.get(cid, '', { preload: false })
 
-        // The pinner stores an object that has two links to pin sets:
-        // 1. The directly pinned CIDs
-        // 2. The recursively pinned CIDs
-        // If large enough, these pin sets may have links to buckets to hold
-        // the pins
-        this.pinset.getInternalCids(obj.value, (err, cids) => {
-          if (err) {
-            return callback(new Error(`Could not get pinner internal cids: ${err.message}`))
-          }
+    // The pinner stores an object that has two links to pin sets:
+    // 1. The directly pinned CIDs
+    // 2. The recursively pinned CIDs
+    // If large enough, these pin sets may have links to buckets to hold
+    // the pins
+    const cids = await this.pinset.getInternalCids(obj.value)
 
-          callback(null, cids.concat(cid))
-        })
-      })
-    })
+    return cids.concat(cid)
   }
 
-  fetchCompleteDag (cid, options, callback) {
-    this._walkDag({
+  async fetchCompleteDag (cid, options) {
+    await this._walkDag({
       cid,
       preload: options.preload
-    }, callback)
+    })
   }
 
   // Returns an error if the pin type is invalid
diff --git a/src/core/components/pin/pin-set.js b/src/core/components/pin/pin-set.js
index b0cdde97b8..552fc23313 100644
--- a/src/core/components/pin/pin-set.js
+++ b/src/core/components/pin/pin-set.js
@@ -7,10 +7,9 @@ const fnv1a = require('fnv1a')
 const varint = require('varint')
 const { DAGNode, DAGLink } = require('ipld-dag-pb')
 const multicodec = require('multicodec')
-const someSeries = require('async/someSeries')
-const eachSeries = require('async/eachSeries')
-const eachOfSeries = require('async/eachOfSeries')
-
+const { default: Queue } = require('p-queue')
+const dagCborLinks = require('dag-cbor-links')
+const log = require('debug')('ipfs:pin:pin-set')
 const pbSchema = require('./pin.proto')
 
 const emptyKeyHash = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'
@@ -19,6 +18,8 @@ const defaultFanout = 256
 const maxItems = 8192
 const pb = protobuf(pbSchema)
 
+const HAS_DESCENDANT_CONCURRENCY = 100
+
 function toB58String (hash) {
   return new CID(hash).toBaseEncodedString()
 }
@@ -29,20 +30,26 @@ function readHeader (rootNode) {
   const rootData = rootNode.Data
   const hdrLength = varint.decode(rootData)
   const vBytes = varint.decode.bytes
+
   if (vBytes <= 0) {
     throw new Error('Invalid Set header length')
   }
+
   if (vBytes + hdrLength > rootData.length) {
     throw new Error('Impossibly large set header length')
   }
+
   const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes)
   const header = pb.Set.decode(hdrSlice)
+
   if (header.version !== 1) {
     throw new Error(`Unsupported Set version: ${header.version}`)
   }
+
   if (header.fanout > rootNode.Links.length) {
     throw new Error('Impossibly large fanout')
   }
+
   return {
     header: header,
     data: rootData.slice(hdrLength + vBytes)
@@ -58,45 +65,90 @@ function hash (seed, key) {
   return fnv1a(data.toString('binary'))
 }
 
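+// dag-cbor-links yields [path, CID] tuples but only the CIDs matter to
+// the pin set, so this helper discards the path component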
+function * cborCids (node) {
+  for (const [_, cid] of dagCborLinks(node)) { // eslint-disable-line no-unused-vars
+    yield cid
+  }
+}
+
 exports = module.exports = function (dag) {
   const pinSet = {
     // should this be part of `object` API?
-    hasDescendant: (root, childhash, callback) => {
-      const seen = {}
+    hasDescendant: async (parentCid, childhash) => {
+      if (parentCid.codec !== 'dag-pb' && parentCid.codec !== 'dag-cbor') {
+        return false
+      }
+
+      const { value: root } = await dag.get(parentCid, { preload: false })
+      const queue = new Queue({
+        concurrency: HAS_DESCENDANT_CONCURRENCY
+      })
 
       if (CID.isCID(childhash) || Buffer.isBuffer(childhash)) {
         childhash = toB58String(childhash)
       }
 
-      return searchChildren(root, callback)
+      let found = false
+      const seen = {}
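+
+      // breadth-first search with a visited set; the queue is cleared as
+      // soon as the child hash is found so no further lookups are scheduled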
 
-      function searchChildren (root, cb) {
-        someSeries(root.Links, (link, done) => {
-          const cid = link.Hash
-          const bs58Link = toB58String(cid)
+      function searchChild (linkCid) {
+        return async () => {
+          if (found) {
+            return
+          }
+
+          try {
+            const { value: childNode } = await dag.get(linkCid, { preload: false })
+
+            searchChildren(linkCid, childNode)
+          } catch (err) {
+            log(err)
+          }
+        }
+      }
+
+      function searchChildren (cid, node) {
+        let links = []
+
+        if (cid.codec === 'dag-pb') {
+          links = node.Links
+        } else if (cid.codec === 'dag-cbor') {
+          links = cborCids(node)
+        }
+
+        for (const link of links) {
+          // dag-pb links wrap the CID; cborCids already yields bare CIDs
+          const linkCid = cid.codec === 'dag-pb' ? link.Hash : link
+          const bs58Link = toB58String(linkCid)
 
           if (bs58Link === childhash) {
-            return done(null, true)
+            queue.clear()
+            found = true
+
+            return
          }
 
-          if (bs58Link in seen) {
-            return done(null, false)
+          if (seen[bs58Link]) {
+            continue
           }
 
           seen[bs58Link] = true
 
-          dag.get(cid, '', { preload: false }, (err, res) => {
-            if (err) {
-              return done(err)
-            }
+          if (linkCid.codec !== 'dag-pb' && linkCid.codec !== 'dag-cbor') {
+            continue
+          }
 
-            searchChildren(res.value, done)
-          })
-        }, cb)
+          queue.add(searchChild(linkCid))
+        }
       }
+
+      searchChildren(parentCid, root)
+
+      await queue.onIdle()
+
+      return found
     },
 
-    storeSet: (keys, callback) => {
+    storeSet: async (keys) => {
       const pins = keys.map(key => {
         if (typeof key === 'string' || Buffer.isBuffer(key)) {
           key = new CID(key)
@@ -108,25 +160,24 @@ exports = module.exports = function (dag) {
         }
       })
 
-      pinSet.storeItems(pins, (err, rootNode) => {
-        if (err) { return callback(err) }
-
-        dag.put(rootNode, {
-          version: 0,
-          format: multicodec.DAG_PB,
-          hashAlg: multicodec.SHA2_256,
-          preload: false
-        }, (err, cid) => {
-          if (err) { return callback(err, cid) }
-          callback(null, { node: rootNode, cid })
-        })
+      const rootNode = await pinSet.storeItems(pins)
+      const cid = await dag.put(rootNode, {
+        version: 0,
+        format: multicodec.DAG_PB,
+        hashAlg: multicodec.SHA2_256,
+        preload: false
       })
+
+      return {
+        node: rootNode,
+        cid
+      }
     },
 
-    storeItems: (items, callback) => {
-      return storePins(items, 0, callback)
+    storeItems: async (items) => { // eslint-disable-line require-await
+      return storePins(items, 0)
 
-      function storePins (pins, depth, storePinsCb) {
+      async function storePins (pins, depth) {
         const pbHeader = pb.Set.encode({
           version: 1,
           fanout: defaultFanout,
@@ -136,6 +187,7 @@ exports = module.exports = function (dag) {
           Buffer.from(varint.encode(pbHeader.length)), pbHeader
         ])
         const fanoutLinks = []
+
         for (let i = 0; i < defaultFanout; i++) {
           fanoutLinks.push(new DAGLink('', 1, emptyKey))
         }
@@ -156,15 +208,7 @@ exports = module.exports = function (dag) {
           [headerBuf].concat(nodes.map(item => item.data))
         )
 
-        let rootNode
-
-        try {
-          rootNode = new DAGNode(rootData, rootLinks)
-        } catch (err) {
-          return storePinsCb(err)
-        }
-
-        return storePinsCb(null, rootNode)
+        return new DAGNode(rootData, rootLinks)
         } else {
           // If the array of pins is > maxItems, we:
           //  - distribute the pins among `defaultFanout` bins
@@ -180,32 +224,21 @@ exports = module.exports = function (dag) {
             const n = hash(depth, pin.key) % defaultFanout
             bins[n] = n in bins ? bins[n].concat([pin]) : [pin]
             return bins
-          }, {})
-
-          eachOfSeries(bins, (bin, idx, eachCb) => {
-            storePins(
-              bin,
-              depth + 1,
-              (err, child) => storeChild(err, child, idx, eachCb)
-            )
-          }, err => {
-            if (err) { return storePinsCb(err) }
-
-            let rootNode
-
-            try {
-              rootNode = new DAGNode(headerBuf, fanoutLinks)
-            } catch (err) {
-              return storePinsCb(err)
-            }
-
-            return storePinsCb(null, rootNode)
-          })
-        }
+          }, [])
+
+          // bins is sparse: unused fanout slots are holes, so skip them
+          // and keep the bin index aligned with its fanout link
+          for (const [idx, bin] of bins.entries()) {
+            if (!bin) {
+              continue
+            }
+
+            const child = await storePins(bin, depth + 1)
+
+            await storeChild(child, idx)
+          }
+
+          return new DAGNode(headerBuf, fanoutLinks)
+        }
 
-        function storeChild (err, child, binIdx, cb) {
-          if (err) { return cb(err) }
+        async function storeChild (child, binIdx) {
           const opts = {
             version: 0,
             format: multicodec.DAG_PB,
@@ -213,41 +246,34 @@ exports = module.exports = function (dag) {
             preload: false
           }
 
-          dag.put(child, opts, (err, cid) => {
-            if (err) { return cb(err) }
-            fanoutLinks[binIdx] = new DAGLink('', child.size, cid)
-            cb(null)
-          })
+          const cid = await dag.put(child, opts)
+
+          fanoutLinks[binIdx] = new DAGLink('', child.size, cid)
         }
       }
     },
 
-    loadSet: (rootNode, name, callback) => {
+    loadSet: async (rootNode, name) => {
       const link = rootNode.Links.find(l => l.Name === name)
+
       if (!link) {
-        return callback(new Error('No link found with name ' + name))
+        throw new Error('No link found with name ' + name)
      }
 
-      dag.get(link.Hash, '', { preload: false }, (err, res) => {
-        if (err) { return callback(err) }
-        const keys = []
-        const stepPin = link => keys.push(link.Hash.buffer)
-        pinSet.walkItems(res.value, { stepPin }, err => {
-          if (err) { return callback(err) }
-          return callback(null, keys)
-        })
-      })
+      const res = await dag.get(link.Hash, '', { preload: false })
+      const keys = []
+      const stepPin = link => keys.push(link.Hash)
+
+      await pinSet.walkItems(res.value, { stepPin })
+
+      return keys
     },
 
-    walkItems: (node, { stepPin = () => {}, stepBin = () => {} }, callback) => {
-      let pbh
-      try {
-        pbh = readHeader(node)
-      } catch (err) {
-        return callback(err)
-      }
+    walkItems: async (node, { stepPin = () => {}, stepBin = () => {} }) => {
+      const pbh = readHeader(node)
+      let idx = 0
 
-      eachOfSeries(node.Links, (link, idx, eachCb) => {
+      for (const link of node.Links) {
         if (idx < pbh.header.fanout) {
           // the first pbh.header.fanout links are fanout bins
          // if a fanout bin is not 'empty', dig into and walk its DAGLinks
@@ -257,35 +283,35 @@ exports = module.exports = function (dag) {
             stepBin(link, idx, pbh.data)
 
             // walk the links of this fanout bin
-            return dag.get(linkHash, '', { preload: false }, (err, res) => {
-              if (err) { return eachCb(err) }
-              pinSet.walkItems(res.value, { stepPin, stepBin }, eachCb)
-            })
+            const res = await dag.get(linkHash, '', { preload: false })
+
+            await pinSet.walkItems(res.value, { stepPin, stepBin })
           }
         } else {
           // otherwise, the link is a pin
           stepPin(link, idx, pbh.data)
         }
 
-        eachCb(null)
-      }, callback)
+        idx++
       }
     },
 
-    getInternalCids: (rootNode, callback) => {
+    getInternalCids: async (rootNode) => {
       // "Empty block" used by the pinner
      const cids = [new CID(emptyKey)]
-      const stepBin = link => cids.push(link.Hash)
-      eachSeries(rootNode.Links, (topLevelLink, cb) => {
+
+      const stepBin = link => cids.push(link.Hash)
+
+      for (const topLevelLink of rootNode.Links) {
         cids.push(topLevelLink.Hash)
 
-        dag.get(topLevelLink.Hash, '', { preload: false }, (err, res) => {
-          if (err) { return cb(err) }
+        const res = await dag.get(topLevelLink.Hash, '', { preload: false })
 
-          pinSet.walkItems(res.value, { stepBin }, cb)
-        })
-      }, (err) => callback(err, cids))
+        await pinSet.walkItems(res.value, { stepBin })
+      }
+
+      return cids
     }
   }
+
   return pinSet
 }
diff --git a/src/core/components/pre-start.js b/src/core/components/pre-start.js
index b64a898eac..639b94a61f 100644
--- a/src/core/components/pre-start.js
+++ b/src/core/components/pre-start.js
@@ -3,12 +3,11 @@
 const peerId = require('peer-id')
 const PeerInfo = require('peer-info')
 const multiaddr = require('multiaddr')
-const waterfall = require('async/waterfall')
 const Keychain = require('libp2p-keychain')
 const mergeOptions = require('merge-options')
 const NoKeychain = require('./no-keychain')
-const promisify = require('promisify-es6')
 const callbackify = require('callbackify')
+const promisify = require('promisify-es6')
 
 /*
  * Load stuff from Repo into memory
@@ -45,16 +44,15 @@ module.exports = function preStart (self) {
     }
 
     const privKey = config.Identity.PrivKey
-
     // TODO vmx 2019-08-06: upgrade to promise based version of peer-id (>= 0.13)
     const id = await promisify(peerId.createFromPrivKey)(privKey)
 
     // Import the private key as 'self', if needed.
     if (pass) {
       try {
-        await promisify(self._keychain.findKeyByName)('self')
+        await self._keychain.findKeyByName('self')
       } catch (err) {
         self.log('Creating "self" key')
-        await promisify(self._keychain.importPeer)('self', id)
+        await self._keychain.importPeer('self', id)
       }
     }
 
@@ -72,6 +70,6 @@ module.exports = function preStart (self) {
       })
     }
 
-    await self.pin._load()
+    await self.pin.pinManager.load()
  })
}
diff --git a/src/core/components/pubsub.js b/src/core/components/pubsub.js
index ac6a0981cd..fff4435844 100644
--- a/src/core/components/pubsub.js
+++ b/src/core/components/pubsub.js
@@ -1,14 +1,20 @@
 'use strict'
 
-const promisify = require('promisify-es6')
-const setImmediate = require('async/setImmediate')
-const errCode = require('err-code')
-
-const errPubsubDisabled = () => {
-  return errCode(new Error('pubsub experiment is not enabled'), 'ERR_PUBSUB_DISABLED')
-}
+const callbackify = require('callbackify')
+const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR
+const errcode = require('err-code')
 
 module.exports = function pubsub (self) {
+  function checkOnlineAndEnabled () {
+    if (!self.isOnline()) {
+      throw errcode(new Error(OFFLINE_ERROR), 'ERR_OFFLINE')
+    }
+
+    if (!self.libp2p.pubsub) {
+      throw errcode(new Error('pubsub is not enabled'), 'ERR_PUBSUB_DISABLED')
+    }
+  }
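+
+  // subscribe and unsubscribe keep hand-rolled dual-mode handling below:
+  // with a callback they behave like the old API, without one they return
+  // a promise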
 
   return {
     subscribe: (topic, handler, options, callback) => {
       if (typeof options === 'function') {
@@ -16,58 +22,60 @@ module.exports = function pubsub (self) {
         options = {}
       }
 
-      if (!self.libp2p.pubsub) {
-        return callback
-          ? setImmediate(() => callback(errPubsubDisabled()))
-          : Promise.reject(errPubsubDisabled())
-      }
+      if (typeof callback === 'function') {
+        try {
+          checkOnlineAndEnabled()
+        } catch (err) {
+          return callback(err)
+        }
 
-      if (!callback) {
-        return self.libp2p.pubsub.subscribe(topic, handler, options)
+        self.libp2p.pubsub.subscribe(topic, handler, options, callback)
+        return
       }
 
-      self.libp2p.pubsub.subscribe(topic, handler, options, callback)
+      checkOnlineAndEnabled()
+
+      return self.libp2p.pubsub.subscribe(topic, handler, options)
     },
 
     unsubscribe: (topic, handler, callback) => {
-      if (!self.libp2p.pubsub) {
-        return callback
-          ? setImmediate(() => callback(errPubsubDisabled()))
-          : Promise.reject(errPubsubDisabled())
-      }
+      if (typeof callback === 'function') {
+        try {
+          checkOnlineAndEnabled()
+        } catch (err) {
+          return callback(err)
+        }
 
-      if (!callback) {
-        return self.libp2p.pubsub.unsubscribe(topic, handler)
+        self.libp2p.pubsub.unsubscribe(topic, handler, callback)
+        return
       }
 
-      self.libp2p.pubsub.unsubscribe(topic, handler, callback)
+      checkOnlineAndEnabled()
+
+      return self.libp2p.pubsub.unsubscribe(topic, handler)
     },
 
-    publish: promisify((topic, data, callback) => {
-      if (!self.libp2p.pubsub) {
-        return setImmediate(() => callback(errPubsubDisabled()))
-      }
-      self.libp2p.pubsub.publish(topic, data, callback)
+    publish: callbackify(async (topic, data) => { // eslint-disable-line require-await
+      checkOnlineAndEnabled()
+
+      await self.libp2p.pubsub.publish(topic, data)
     }),
 
-    ls: promisify((callback) => {
-      if (!self.libp2p.pubsub) {
-        return setImmediate(() => callback(errPubsubDisabled()))
-      }
-      self.libp2p.pubsub.ls(callback)
+    ls: callbackify(async () => { // eslint-disable-line require-await
+      checkOnlineAndEnabled()
+
+      return self.libp2p.pubsub.ls()
     }),
 
-    peers: promisify((topic, callback) => {
-      if (!self.libp2p.pubsub) {
-        return setImmediate(() => callback(errPubsubDisabled()))
-      }
-      self.libp2p.pubsub.peers(topic, callback)
+    peers: callbackify(async (topic) => { // eslint-disable-line require-await
+      checkOnlineAndEnabled()
+
+      return self.libp2p.pubsub.peers(topic)
     }),
 
     setMaxListeners (n) {
-      if (!self.libp2p.pubsub) {
-        throw errPubsubDisabled()
-      }
+      checkOnlineAndEnabled()
+
       self.libp2p.pubsub.setMaxListeners(n)
     }
   }
diff --git a/src/core/components/repo.js b/src/core/components/repo.js
index d7f81c1059..b8373619e9 100644
--- a/src/core/components/repo.js
+++ b/src/core/components/repo.js
@@ -1,14 +1,13 @@
 'use strict'
 
-const promisify = require('promisify-es6')
 const repoVersion = require('ipfs-repo').repoVersion
 const callbackify = require('callbackify')
 
 module.exports = function repo (self) {
   return {
-    init: (bits, empty, callback) => {
+    init: callbackify(async (bits, empty) => {
       // 1. check if repo already exists
-    },
+    }),
 
     /**
      * If the repo has been initialized, report the current version.
@@ -41,23 +40,18 @@ module.exports = function repo (self) {
 
     gc: require('./pin/gc')(self),
 
-    stat: promisify((options, callback) => {
-      if (typeof options === 'function') {
-        callback = options
-        options = {}
-      }
+    stat: callbackify.variadic(async (options) => {
+      options = options || {}
 
-      self._repo.stat(options, (err, stats) => {
-        if (err) return callback(err)
+      const stats = await self._repo.stat(options)
 
-        callback(null, {
-          numObjects: stats.numObjects,
-          repoSize: stats.repoSize,
-          repoPath: stats.repoPath,
-          version: stats.version.toString(),
-          storageMax: stats.storageMax
-        })
-      })
+      return {
+        numObjects: stats.numObjects,
+        repoSize: stats.repoSize,
+        repoPath: stats.repoPath,
+        version: stats.version.toString(),
+        storageMax: stats.storageMax
+      }
     }),
 
     path: () => self._repo.path
diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js
index 497ba2447a..268952dfe7 100644
--- a/src/core/components/resolve.js
+++ b/src/core/components/resolve.js
@@ -40,7 +40,9 @@ module.exports = (ipfs) => {
    * @param {ResolveOptions} [opts={}]
    * @returns {Promise}
    */
-  const resolve = async (name, opts = {}) => {
+  const resolve = async (name, opts) => {
+    opts = opts || {}
+
     if (!isIpfs.path(name)) {
       throw new Error('invalid argument ' + name)
     }
@@ -72,11 +74,12 @@ module.exports = (ipfs) => {
     return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}`
   }
 
-  return (name, opts = {}, cb) => {
+  return (name, opts, cb) => {
     if (typeof opts === 'function') {
       cb = opts
       opts = {}
     }
+    opts = opts || {}
 
     return nodeify(resolve(name, opts), cb)
   }
 }
diff --git a/src/core/components/start.js b/src/core/components/start.js
index fea348ebc7..b3ea02bfa3 100644
--- a/src/core/components/start.js
+++ b/src/core/components/start.js
@@ -1,42 +1,27 @@
 'use strict'
 
-const series = require('async/series')
 const Bitswap = require('ipfs-bitswap')
-const setImmediate = require('async/setImmediate')
-const promisify = require('promisify-es6')
+const callbackify = require('callbackify')
 const IPNS = require('../ipns')
 const routingConfig = require('../ipns/routing/config')
 const createLibp2pBundle = require('./libp2p')
 
 module.exports = (self) => {
-  return promisify(async (callback) => {
-    const done = (err) => {
-      if (err) {
-        setImmediate(() => self.emit('error', err))
-        return callback(err)
-      }
-debugger
-      self.state.started()
-      setImmediate(() => self.emit('start'))
-      callback()
-    }
-
+  return callbackify(async () => {
     if (self.state.state() !== 'stopped') {
-      return done(new Error(`Not able to start from state: ${self.state.state()}`))
+      throw new Error(`Not able to start from state: ${self.state.state()}`)
     }
 
     self.log('starting')
-    debugger
     self.state.start()
 
     // The repo may be closed if previously stopped
-    if(self._repo.closed) {
+    if (self._repo.closed) {
       await self._repo.open()
     }
+
     const config = await self._repo.config.get()
-    debugger
-    console.log('vmx: start: config:', config)
 
     const libp2p = createLibp2pBundle(self, config)
 
     await libp2p.start()
@@ -47,23 +32,20 @@ debugger
 
     self._bitswap = new Bitswap(
       self.libp2p,
-      self._repo.blocks,
-      { statsEnabled: true }
+      self._repo.blocks, {
+        statsEnabled: true
+      }
     )
 
-    self._bitswap.start()
-    // NOTE vmx 2019-08-22: ipfs-bitswap isn't async/awaitified yet, hence
-    // do it here
-    self._promisifiedBitswap = {
-      get: promisify(self._bitswap.get.bind(self._bitswap)),
-      getMany: promisify(self._bitswap.getMany.bind(self._bitswap)),
-      put: promisify(self._bitswap.put.bind(self._bitswap)),
-      putMany: promisify(self._bitswap.putMany.bind(self._bitswap)),
-    }
-    self._blockService.setExchange(self._promisifiedBitswap)
+    await self._bitswap.start()
+
+    self._blockService.setExchange(self._bitswap)
+
+    await self._preload.start()
+    await self._ipns.republisher.start()
+    await self._mfsPreload.start()
 
-    self._preload.start()
-    self._ipns.republisher.start()
-    self._mfsPreload.start(done)
+    self.state.started()
+    self.emit('start')
   })
 }
diff --git a/src/core/components/stats.js b/src/core/components/stats.js
index 4c8132b304..88c19b352e 100644
--- a/src/core/components/stats.js
+++ b/src/core/components/stats.js
@@ -1,6 +1,6 @@
 'use strict'
 
-const promisify = require('promisify-es6')
+const callbackify = require('callbackify')
 const Big = require('bignumber.js')
 const Pushable = require('pull-pushable')
 const human = require('human-to-milliseconds')
@@ -8,34 +8,34 @@ const toStream = require('pull-stream-to-stream')
 const errCode = require('err-code')
 
 function bandwidthStats (self, opts) {
-  return new Promise((resolve, reject) => {
-    let stats
+  let stats
 
-    if (opts.peer) {
-      stats = self.libp2p.stats.forPeer(opts.peer)
-    } else if (opts.proto) {
-      stats = self.libp2p.stats.forProtocol(opts.proto)
-    } else {
-      stats = self.libp2p.stats.global
-    }
+  if (opts.peer) {
+    stats = self.libp2p.stats.forPeer(opts.peer)
+  } else if (opts.proto) {
+    stats = self.libp2p.stats.forProtocol(opts.proto)
+  } else {
+    stats = self.libp2p.stats.global
+  }
 
-    if (!stats) {
-      resolve({
-        totalIn: new Big(0),
-        totalOut: new Big(0),
-        rateIn: new Big(0),
-        rateOut: new Big(0)
-      })
-      return
+  if (!stats) {
+    return {
+      totalIn: new Big(0),
+      totalOut: new Big(0),
+      rateIn: new Big(0),
+      rateOut: new Big(0)
     }
+  }
 
-    resolve({
-      totalIn: stats.snapshot.dataReceived,
-      totalOut: stats.snapshot.dataSent,
-      rateIn: new Big(stats.movingAverages.dataReceived['60000'].movingAverage() / 60),
-      rateOut: new Big(stats.movingAverages.dataSent['60000'].movingAverage() / 60)
-    })
-  })
+  const snapshot = stats.snapshot
+  const movingAverages = stats.movingAverages
+
+  return {
+    totalIn: snapshot.dataReceived,
+    totalOut: snapshot.dataSent,
+    rateIn: new Big(movingAverages.dataReceived['60000'].movingAverage() / 60),
+    rateOut: new Big(movingAverages.dataSent['60000'].movingAverage() / 60)
+  }
 }
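+
+// bandwidthStats is now synchronous: libp2p keeps these stats in memory,
+// so there is nothing to await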
 
 module.exports = function stats (self) {
@@ -60,22 +60,11 @@ module.exports = function stats (self) {
     }
 
     interval = setInterval(() => {
-      bandwidthStats(self, opts)
-        .then((stats) => stream.push(stats))
-        .catch((err) => {
-          if (interval) {
-            clearInterval(interval)
-          }
-          stream.end(err)
-        })
+      stream.push(bandwidthStats(self, opts))
     }, value)
   } else {
-    bandwidthStats(self, opts)
-      .then((stats) => {
-        stream.push(stats)
-        stream.end()
-      })
-      .catch((err) => stream.end(err))
+    stream.push(bandwidthStats(self, opts))
+    stream.end()
   }
 
   return stream.source
@@ -84,17 +73,9 @@ module.exports = function stats (self) {
   return {
     bitswap: require('./bitswap')(self).stat,
     repo: require('./repo')(self).stat,
-    bw: promisify((opts, callback) => {
-      if (typeof opts === 'function') {
-        callback = opts
-        opts = {}
-      }
-
+    bw: callbackify.variadic(async (opts) => { // eslint-disable-line require-await
       opts = opts || {}
-
-      bandwidthStats(self, opts)
-        .then((stats) => callback(null, stats))
-        .catch((err) => callback(err))
+      return bandwidthStats(self, opts)
     }),
     bwReadableStream: (opts) => toStream.source(_bwPullStream(opts)),
     bwPullStream: _bwPullStream
diff --git a/src/core/components/stop.js b/src/core/components/stop.js
index b4c31e92ee..1ee7bb9518 100644
--- a/src/core/components/stop.js
+++ b/src/core/components/stop.js
@@ -1,12 +1,9 @@
 'use strict'
 
-const promisify = require('promisify-es6')
 const callbackify = require('callbackify')
 
 module.exports = (self) => {
   return callbackify(async () => {
-    callback = callback || function noop () {}
-
     self.log('stop')
 
     if (self.state.state() === 'stopped') {
@@ -26,25 +23,18 @@ module.exports = (self) => {
     self.libp2p = null
 
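+    // the subsystems are stopped in parallel; the repo is closed in the
+    // same batch since nothing after this point touches it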
b/src/core/components/version.js @@ -1,26 +1,17 @@ 'use strict' const pkg = require('../../../package.json') -const promisify = require('promisify-es6') +const callbackify = require('callbackify') // TODO add the commit hash of the current ipfs version to the response. module.exports = function version (self) { - return promisify((opts, callback) => { - if (typeof opts === 'function') { - callback = opts - opts = {} - } - - self.repo.version((err, repoVersion) => { - if (err) { - return callback(err) - } + return callbackify(async () => { + const repoVersion = await self.repo.version() - callback(null, { - version: pkg.version, - repo: repoVersion, - commit: '' - }) - }) + return { + version: pkg.version, + repo: repoVersion, + commit: '' + } }) } diff --git a/src/core/index.js b/src/core/index.js index 4e0cd8da39..1d2483a3b9 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -102,7 +102,6 @@ class IPFS extends EventEmitter { // - for booting up a node this.init = components.init(this) this.preStart = components.preStart(this) - //debugger this.start = components.start(this) this.stop = components.stop(this) this.shutdown = this.stop diff --git a/src/core/ipns/index.js b/src/core/ipns/index.js index d15fe22982..c96fad80a2 100644 --- a/src/core/ipns/index.js +++ b/src/core/ipns/index.js @@ -1,7 +1,7 @@ 'use strict' const { createFromPrivKey } = require('peer-id') -const series = require('async/series') +const promisify = require('promisify-es6') const errcode = require('err-code') const debug = require('debug') @@ -30,7 +30,7 @@ class IPNS { try { value = normalizePath(value) - const peerId = await createFromPrivKey(privKey.bytes) + const peerId = await promisify(createFromPrivKey)(privKey.bytes) await this.publisher.publishWithEOL(privKey, value, lifetime) log(`IPNS value ${value} was published correctly`) diff --git a/src/core/ipns/path.js b/src/core/ipns/path.js index 1f68a7fe1d..0fb9e34ff7 100644 --- a/src/core/ipns/path.js +++ b/src/core/ipns/path.js @@ -8,22 +8,16 @@ log.error = debug('ipfs:ipns:path:error') // resolves the given path by parsing out protocol-specific entries // (e.g. 
/ipns/) and then going through the /ipfs/ entries and returning the final node -const resolvePath = (ipfsNode, name, callback) => { +const resolvePath = (ipfsNode, name) => { // ipns path if (isIPFS.ipnsPath(name)) { log(`resolve ipns path ${name}`) - return ipfsNode._ipns.resolve(name, callback) + return ipfsNode._ipns.resolve(name) } // ipfs path - ipfsNode.dag.get(name.substring('/ipfs/'.length), (err, value) => { - if (err) { - return callback(err) - } - - return callback(null, value) - }) + return ipfsNode.dag.get(name.substring('/ipfs/'.length)) } module.exports = { diff --git a/src/core/ipns/publisher.js b/src/core/ipns/publisher.js index d0dab0256b..ef4470ec36 100644 --- a/src/core/ipns/publisher.js +++ b/src/core/ipns/publisher.js @@ -3,7 +3,7 @@ const PeerId = require('peer-id') const { Key } = require('interface-datastore') const errcode = require('err-code') - +const promisify = require('promisify-es6') const debug = require('debug') const log = debug('ipfs:ipns:publisher') log.error = debug('ipfs:ipns:publisher:error') @@ -25,7 +25,7 @@ class IpnsPublisher { throw errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY') } - const peerId = await PeerId.createFromPrivKey(privKey.bytes) + const peerId = await promisify(PeerId.createFromPrivKey)(privKey.bytes) const record = await this._updateOrCreateRecord(privKey, value, lifetime, peerId) return this._putRecordToRouting(record, peerId) @@ -39,7 +39,6 @@ class IpnsPublisher { async _putRecordToRouting (record, peerId) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' - log.error(errMsg) throw errcode(new Error(errMsg), 'ERR_INVALID_PEER_ID') @@ -96,7 +95,6 @@ class IpnsPublisher { async _publishPublicKey (key, publicKey) { if ((!Key.isKey(key))) { const errMsg = 'datastore key does not have a valid format' - log.error(errMsg) throw errcode(new Error(errMsg), 'ERR_INVALID_DATASTORE_KEY') @@ -104,7 +102,6 @@ class IpnsPublisher { if (!publicKey || !publicKey.bytes) { const errMsg = 'one or more of the provided parameters are not defined' - log.error(errMsg) throw errcode(new Error(errMsg), 'ERR_UNDEFINED_PARAMETER') @@ -182,7 +179,6 @@ class IpnsPublisher { async _updateOrCreateRecord (privKey, value, validity, peerId) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' - log.error(errMsg) throw errcode(new Error(errMsg), 'ERR_INVALID_PEER_ID') @@ -220,7 +216,7 @@ class IpnsPublisher { } catch (err) { const errMsg = `ipns record for ${value} could not be created` - log.error(errMsg) + log.error(err) throw errcode(new Error(errMsg), 'ERR_CREATING_IPNS_RECORD') } diff --git a/src/core/ipns/republisher.js b/src/core/ipns/republisher.js index b71a19cb6d..907fdf4709 100644 --- a/src/core/ipns/republisher.js +++ b/src/core/ipns/republisher.js @@ -4,10 +4,9 @@ const ipns = require('ipns') const crypto = require('libp2p-crypto') const PeerId = require('peer-id') const errcode = require('err-code') +const promisify = require('promisify-es6') const debug = require('debug') -const each = require('async/each') -const waterfall = require('async/waterfall') const log = debug('ipfs:ipns:republisher') log.error = debug('ipfs:ipns:republisher:error') @@ -34,144 +33,147 @@ class IpnsRepublisher { // TODO: this handler should be isolated in another module const republishHandle = { - _onCancel: null, + _task: null, + _inflightTask: null, _timeoutId: null, - runPeriodically: (fn, period) => { - republishHandle._timeoutId = setTimeout(() => { + runPeriodically: (period) => { + 
republishHandle._timeoutId = setTimeout(async () => { republishHandle._timeoutId = null - fn((nextPeriod) => { - // Was republish cancelled while fn was being called? - if (republishHandle._onCancel) { - return republishHandle._onCancel() - } + try { + republishHandle._inflightTask = republishHandle._task() + await republishHandle._inflightTask + // Schedule next - republishHandle.runPeriodically(fn, nextPeriod || period) - }) - }, period) + if (republishHandle._task) { + republishHandle.runPeriodically(period) + } + } catch (err) { + log.error(err) + } + }, period()) }, - cancel: (cb) => { - // Not currently running a republish, can callback immediately - if (republishHandle._timeoutId) { - clearTimeout(republishHandle._timeoutId) - return cb() - } - // Wait for republish to finish then call callback - republishHandle._onCancel = cb + cancel: async () => { + // do not run again + clearTimeout(republishHandle._timeoutId) + republishHandle._task = null + + // wait for the currently in flight task to complete + await republishHandle._inflightTask } } const { privKey } = this._peerInfo.id const { pass } = this._options + let firstRun = true + + republishHandle._task = async () => { + await this._republishEntries(privKey, pass) - republishHandle.runPeriodically((done) => { - this._republishEntries(privKey, pass, () => done(defaultBroadcastInterval)) - }, minute) + return defaultBroadcastInterval + } + republishHandle.runPeriodically(() => { + if (firstRun) { + firstRun = false + + return minute + } + + return defaultBroadcastInterval + }) this._republishHandle = republishHandle } - stop (callback) { + async stop () { const republishHandle = this._republishHandle if (!republishHandle) { - return callback(errcode(new Error('republisher is not running'), 'ERR_REPUBLISH_NOT_RUNNING')) + throw errcode(new Error('republisher is not running'), 'ERR_REPUBLISH_NOT_RUNNING') } this._republishHandle = null - republishHandle.cancel(callback) + + await republishHandle.cancel() } - _republishEntries (privateKey, pass, callback) { + async _republishEntries (privateKey, pass) { // TODO: Should use list of published entries. // We can't currently *do* that because go uses this method for now. 
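
[Editor's note, not part of the patch: the promise-based republishHandle above replaces the callback scheduler with three pieces of state: _task (the async job), _inflightTask (the promise of the run currently executing) and _timeoutId (the pending timer). cancel() clears the timer, nulls _task so a completed run does not reschedule itself, then awaits the in-flight run. A minimal standalone sketch of the same pattern, using illustrative names that do not appear in the patch:

  const handle = {
    _task: null,
    _inflight: null,
    _timer: null,
    run (period) {
      this._timer = setTimeout(async () => {
        try {
          this._inflight = this._task()
          await this._inflight
          // reschedule only if cancel() has not cleared the task
          if (this._task) this.run(period)
        } catch (err) {
          console.error(err)
        }
      }, period())
    },
    async cancel () {
      clearTimeout(this._timer) // stop a run that has not started yet
      this._task = null // prevent rescheduling
      await this._inflight // wait for a run that is mid-flight
    }
  }

  handle._task = async () => { /* republish entries here */ }
  handle.run(() => 60 * 1000)
]
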
- this._republishEntry(privateKey, (err) => { - if (err) { - const errMsg = 'cannot republish entry for the node\'s private key' + try { + await this._republishEntry(privateKey) + } catch (err) { + const errMsg = 'cannot republish entry for the node\'s private key' - log.error(errMsg) - return - } + log.error(errMsg) + return + } - // keychain needs pass to get the cryptographic keys - if (pass) { - this._keychain.listKeys((err, list) => { - if (err) { - log.error(err) - return - } + // keychain needs pass to get the cryptographic keys + if (pass) { + try { + const keys = await this._keychain.listKeys() - each(list, (key, cb) => { - waterfall([ - (cb) => this._keychain.exportKey(key.name, pass, cb), - (pem, cb) => crypto.keys.import(pem, pass, cb) - ], (err, privKey) => { - if (err) { - log.error(err) - return - } - - this._republishEntry(privKey, cb) - }) - }, (err) => { - if (err) { - log.error(err) - } - callback(null) - }) - }) - } else { - callback(null) + for (const key of keys) { + const pem = await this._keychain.exportKey(key.name, pass) + const privKey = await crypto.keys.import(pem, pass) + + await this._republishEntry(privKey) + } + } catch (err) { + log.error(err) } - }) + } } - _republishEntry (privateKey, callback) { + async _republishEntry (privateKey) { if (!privateKey || !privateKey.bytes) { - return callback(errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY')) + throw errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY') } - waterfall([ - (cb) => PeerId.createFromPrivKey(privateKey.bytes, cb), - (peerId, cb) => this._getPreviousValue(peerId, cb) - ], (err, value) => { - if (err) { - return callback(err.code === 'ERR_NO_ENTRY_FOUND' ? null : err) + try { + const peerId = await promisify(PeerId.createFromPrivKey)(privateKey.bytes) + const value = await this._getPreviousValue(peerId) + await this._publisher.publishWithEOL(privateKey, value, defaultRecordLifetime) + } catch (err) { + if (err.code === 'ERR_NO_ENTRY_FOUND') { + return } - this._publisher.publishWithEOL(privateKey, value, defaultRecordLifetime, callback) - }) + throw err + } } - _getPreviousValue (peerId, callback) { + async _getPreviousValue (peerId) { if (!(PeerId.isPeerId(peerId))) { - return callback(errcode(new Error('invalid peer ID'), 'ERR_INVALID_PEER_ID')) + throw errcode(new Error('invalid peer ID'), 'ERR_INVALID_PEER_ID') } - this._datastore.get(ipns.getLocalKey(peerId.id), (err, dsVal) => { - // error handling - // no need to republish - if (err && err.notFound) { - return callback(errcode(new Error(`no previous entry for record with id: ${peerId.id}`), 'ERR_NO_ENTRY_FOUND')) - } else if (err) { - return callback(err) - } + try { + const dsVal = await this._datastore.get(ipns.getLocalKey(peerId.id)) if (!Buffer.isBuffer(dsVal)) { - return callback(errcode(new Error("found ipns record that we couldn't process"), 'ERR_INVALID_IPNS_RECORD')) + throw errcode(new Error("found ipns record that we couldn't process"), 'ERR_INVALID_IPNS_RECORD') } // unmarshal data - let record try { - record = ipns.unmarshal(dsVal) + const record = ipns.unmarshal(dsVal) + + return record.value } catch (err) { log.error(err) - return callback(errcode(new Error('found ipns record that we couldn\'t convert to a value'), 'ERR_INVALID_IPNS_RECORD')) + throw errcode(new Error('found ipns record that we couldn\'t convert to a value'), 'ERR_INVALID_IPNS_RECORD') + } + } catch (err) { + // error handling + // no need to republish + if (err && err.notFound) { + throw errcode(new Error(`no previous
entry for record with id: ${peerId.id}`), 'ERR_NO_ENTRY_FOUND') } - callback(null, record.value) - }) + throw err + } } } diff --git a/src/core/ipns/resolver.js b/src/core/ipns/resolver.js index 125f228eed..60a1358571 100644 --- a/src/core/ipns/resolver.js +++ b/src/core/ipns/resolver.js @@ -16,14 +16,11 @@ class IpnsResolver { this._routing = routing } - resolve (name, options, callback) { - if (typeof options === 'function') { - callback = options - options = {} - } + async resolve (name, options) { + options = options || {} if (typeof name !== 'string') { - return callback(errcode(new Error('invalid name'), 'ERR_INVALID_NAME')) + throw errcode(new Error('invalid name'), 'ERR_INVALID_NAME') } options = options || {} @@ -32,7 +29,7 @@ class IpnsResolver { const nameSegments = name.split('/') if (nameSegments.length !== 3 || nameSegments[0] !== '') { - return callback(errcode(new Error('invalid name'), 'ERR_INVALID_NAME')) + throw errcode(new Error('invalid name'), 'ERR_INVALID_NAME') } const key = nameSegments[2] @@ -44,117 +41,101 @@ class IpnsResolver { depth = defaultMaximumRecursiveDepth } - this.resolver(key, depth, (err, res) => { - if (err) { - return callback(err) - } + const res = await this.resolver(key, depth) - log(`${name} was locally resolved correctly`) - callback(null, res) - }) + log(`${name} was locally resolved correctly`) + return res } // Recursive resolver according to the specified depth - resolver (name, depth, callback) { + async resolver (name, depth) { // Exceeded recursive maximum depth if (depth === 0) { const errMsg = `could not resolve name (recursion limit of ${defaultMaximumRecursiveDepth} exceeded)` - log.error(errMsg) - return callback(errcode(new Error(errMsg), 'ERR_RESOLVE_RECURSION_LIMIT')) - } - this._resolveName(name, (err, res) => { - if (err) { - return callback(err) - } + throw errcode(new Error(errMsg), 'ERR_RESOLVE_RECURSION_LIMIT') + } - const nameSegments = res.split('/') + const res = await this._resolveName(name) + const nameSegments = res.split('/') - // If obtained a ipfs cid or recursive option is disabled - if (nameSegments[1] === 'ipfs' || !depth) { - return callback(null, res) - } + // If obtained a ipfs cid or recursive option is disabled + if (nameSegments[1] === 'ipfs' || !depth) { + return res + } - // continue recursively until depth equals 0 - this.resolver(nameSegments[2], depth - 1, callback) - }) + // continue recursively until depth equals 0 + return this.resolver(nameSegments[2], depth - 1) } // resolve ipns entries from the provided routing - _resolveName (name, callback) { - let peerId + async _resolveName (name) { + const peerId = PeerId.createFromB58String(name) + const { routingKey } = ipns.getIdKeys(peerId.toBytes()) + let record try { - peerId = PeerId.createFromB58String(name) + record = await this._routing.get(routingKey.toBuffer()) } catch (err) { - return callback(err) + log.error(err) + + if (err.code === 'ERR_NOT_FOUND') { + throw errcode(new Error(`record requested for ${name} was not found in the network`), 'ERR_NO_RECORD_FOUND') + } + + throw errcode(new Error(`unexpected error getting the ipns record ${peerId.id}`), 'ERR_UNEXPECTED_ERROR_GETTING_RECORD') } - const { routingKey, routingPubKey } = ipns.getIdKeys(peerId.toBytes()) + // IPNS entry + let ipnsEntry + try { + ipnsEntry = ipns.unmarshal(record) + } catch (err) { + log.error(err) - this._routing.get(routingKey.toBuffer(), (err, record) => { - if (err) { - log.error(err) - if (err.code !== 'ERR_NOT_FOUND') { - return callback(errcode(new 
Error(`unexpected error getting the ipns record ${peerId.id}`), 'ERR_UNEXPECTED_ERROR_GETTING_RECORD')) - } - return callback(errcode(new Error(`record requested was not found for ${name} (${routingKey}) in the network`), 'ERR_NO_RECORD_FOUND')) - } + throw errcode(new Error('found ipns record that we couldn\'t convert to a value'), 'ERR_INVALID_RECORD_RECEIVED') + } - // IPNS entry - let ipnsEntry - try { - ipnsEntry = ipns.unmarshal(record) - } catch (err) { - log.error(err) - return callback(errcode(new Error('found ipns record that we couldn\'t convert to a value'), 'ERR_INVALID_RECORD_RECEIVED')) - } + // if the record has a public key validate it + if (ipnsEntry.pubKey) { + return this._validateRecord(peerId, ipnsEntry) + } - // if the record has a public key validate it - if (ipnsEntry.pubKey) { - return this._validateRecord(peerId, ipnsEntry, callback) + // Otherwise, try to get the public key from routing + let pubKey + try { + pubKey = await this._routing.get(routingKey.toBuffer()) + } catch (err) { + log.error(err) + + if (err.code === 'ERR_NOT_FOUND') { + throw errcode(new Error(`public key requested for ${name} was not found in the network`), 'ERR_NO_RECORD_FOUND') } - // Otherwise, try to get the public key from routing - this._routing.get(routingKey.toBuffer(), (err, pubKey) => { - if (err) { - log.error(err) - if (err.code !== 'ERR_NOT_FOUND') { - return callback(errcode(new Error(`unexpected error getting the public key for the ipns record ${peerId.id}`), 'ERR_UNEXPECTED_ERROR_GETTING_PUB_KEY')) - } - return callback(errcode(new Error(`public key requested was not found for ${name} (${routingPubKey}) in the network`), 'ERR_NO_RECORD_FOUND')) - } - - try { - // Insert it into the peer id, in order to be validated by IPNS validator - peerId.pubKey = crypto.keys.unmarshalPublicKey(pubKey) - } catch (err) { - log.error(err) - return callback(errcode(new Error('found public key record that we couldn\'t convert to a value'), 'ERR_INVALID_PUB_KEY_RECEIVED')) - } - - this._validateRecord(peerId, ipnsEntry, callback) - }) - }) + throw errcode(new Error(`unexpected error getting the public key for the ipns record ${peerId.id}`), 'ERR_UNEXPECTED_ERROR_GETTING_PUB_KEY') + } + + try { + // Insert it into the peer id, in order to be validated by IPNS validator + peerId.pubKey = crypto.keys.unmarshalPublicKey(pubKey) + } catch (err) { + log.error(err) + + throw errcode(new Error('found public key record that we couldn\'t convert to a value'), 'ERR_INVALID_PUB_KEY_RECEIVED') + } + + return this._validateRecord(peerId, ipnsEntry) } // validate a resolved record - _validateRecord (peerId, ipnsEntry, callback) { - ipns.extractPublicKey(peerId, ipnsEntry, (err, pubKey) => { - if (err) { - return callback(err) - } + async _validateRecord (peerId, ipnsEntry) { + const pubKey = await ipns.extractPublicKey(peerId, ipnsEntry) - // IPNS entry validation - ipns.validate(pubKey, ipnsEntry, (err) => { - if (err) { - return callback(err) - } + // IPNS entry validation + await ipns.validate(pubKey, ipnsEntry) - callback(null, ipnsEntry.value.toString()) - }) - }) + return ipnsEntry.value.toString() } } diff --git a/src/core/ipns/routing/offline-datastore.js b/src/core/ipns/routing/offline-datastore.js index a76a2eebb4..715fe96464 100644 --- a/src/core/ipns/routing/offline-datastore.js +++ b/src/core/ipns/routing/offline-datastore.js @@ -3,7 +3,6 @@ const { Key } = require('interface-datastore') const { Record } = require('libp2p-record') const { encodeBase32 } = require('./utils') -const callbackify = 
require('callbackify') const errcode = require('err-code') const debug = require('debug') @@ -24,30 +23,28 @@ class OfflineDatastore { * @param {function(Error)} callback * @returns {void} */ - put (key, value, callback) { - return callbackify((key, value) => { - if (!Buffer.isBuffer(key)) { - throw errcode(new Error('Offline datastore key must be a buffer'), 'ERR_INVALID_KEY') - } - - if (!Buffer.isBuffer(value)) { - throw errcode(new Error('Offline datastore value must be a buffer'), 'ERR_INVALID_VALUE') - } - - let routingKey - - try { - routingKey = this._routingKey(key) - } catch (err) { - log.error(err) - throw errcode(new Error('Not possible to generate the routing key'), 'ERR_GENERATING_ROUTING_KEY') - } - - // Marshal to libp2p record as the DHT does - const record = new Record(key, value) - - return this._repo.datastore.put(routingKey, record.serialize()) - })(key, value, callback) + async put (key, value) { // eslint-disable-line require-await + if (!Buffer.isBuffer(key)) { + throw errcode(new Error('Offline datastore key must be a buffer'), 'ERR_INVALID_KEY') + } + + if (!Buffer.isBuffer(value)) { + throw errcode(new Error('Offline datastore value must be a buffer'), 'ERR_INVALID_VALUE') + } + + let routingKey + + try { + routingKey = this._routingKey(key) + } catch (err) { + log.error(err) + throw errcode(new Error('Not possible to generate the routing key'), 'ERR_GENERATING_ROUTING_KEY') + } + + // Marshal to libp2p record as the DHT does + const record = new Record(key, value) + + return this._repo.datastore.put(routingKey, record.serialize()) } /** @@ -56,34 +53,32 @@ class OfflineDatastore { * @param {function(Error, Buffer)} callback * @returns {void} */ - get (key, callback) { - return callbackify(async (key) => { - if (!Buffer.isBuffer(key)) { - throw errcode(new Error('Offline datastore key must be a buffer'), 'ERR_INVALID_KEY') - } - - let routingKey - - try { - routingKey = this._routingKey(key) - } catch (err) { - log.error(err) - throw errcode(new Error('Not possible to generate the routing key'), 'ERR_GENERATING_ROUTING_KEY') - } - - const res = await this._repo.datastore.get(routingKey) - - // Unmarshal libp2p record as the DHT does - let record - try { - record = Record.deserialize(res) - } catch (err) { - log.error(err) - throw (err) - } - - return record.value - })(key, callback) + async get (key) { + if (!Buffer.isBuffer(key)) { + throw errcode(new Error('Offline datastore key must be a buffer'), 'ERR_INVALID_KEY') + } + + let routingKey + + try { + routingKey = this._routingKey(key) + } catch (err) { + log.error(err) + throw errcode(new Error('Not possible to generate the routing key'), 'ERR_GENERATING_ROUTING_KEY') + } + + const res = await this._repo.datastore.get(routingKey) + + // Unmarshal libp2p record as the DHT does + let record + try { + record = Record.deserialize(res) + } catch (err) { + log.error(err) + throw (err) + } + + return record.value } // encode key properly - base32(/ipns/{cid}) diff --git a/src/core/ipns/routing/pubsub-datastore.js b/src/core/ipns/routing/pubsub-datastore.js index f71d3d06f6..12735bab95 100644 --- a/src/core/ipns/routing/pubsub-datastore.js +++ b/src/core/ipns/routing/pubsub-datastore.js @@ -29,47 +29,58 @@ class IpnsPubsubDatastore { * @param {function(Error)} callback * @returns {void} */ - put (key, value, callback) { - this._pubsubDs.put(key, value, callback) + async put (key, value) { // eslint-disable-line require-await + return this._pubsubDs.put(key, value) } /** * Get a value from the pubsub datastore indexed 
by the received key properly encoded. - * Moreover, the identifier topic is subscribed and the pubsub datastore records will be + * Also, the identifier topic is subscribed to and the pubsub datastore records will be * updated once new publishes occur. * @param {Buffer} key identifier of the value to be obtained. * @param {function(Error, Buffer)} callback * @returns {void} */ - get (key, callback) { - this._pubsubDs.get(key, (err, res) => { - // Add topic subscribed - const ns = key.slice(0, ipns.namespaceLength) + async get (key) { + let res + let err - if (ns.toString() === ipns.namespace) { - const stringifiedTopic = key.toString() - const id = toB58String(key.slice(ipns.namespaceLength)) + try { + res = await this._pubsubDs.get(key) + } catch (e) { + err = e + } - this._subscriptions[stringifiedTopic] = id + // Add topic subscribed + const ns = key.slice(0, ipns.namespaceLength) - log(`subscribed pubsub ${stringifiedTopic}: ${id}`) - } + if (ns.toString() === ipns.namespace) { + const stringifiedTopic = toB58String(key) + const id = toB58String(key.slice(ipns.namespaceLength)) - // If no data was obtained, after storing the subscription, return the error. - if (err) { - return callback(err) - } + this._subscriptions[stringifiedTopic] = id + + log(`subscribed to pubsub topic ${stringifiedTopic}, id ${id}`) + } + + // If no data was obtained, after storing the subscription, return the error. + if (err) { + throw err + } - callback(null, res) - }) + return res } // Modify subscription key to have a proper encoding - _handleSubscriptionKey (key, callback) { + _handleSubscriptionKey (key) { + if (Buffer.isBuffer(key)) { + key = toB58String(key) + } + const subscriber = this._subscriptions[key] if (!subscriber) { - return callback(errcode(new Error(`key ${key} does not correspond to a subscription`), 'ERR_INVALID_KEY')) + throw errcode(new Error(`key ${key} does not correspond to a subscription`), 'ERR_INVALID_KEY') } let keys @@ -77,21 +88,21 @@ class IpnsPubsubDatastore { keys = ipns.getIdKeys(fromB58String(subscriber)) } catch (err) { log.error(err) - return callback(err) + throw err } - callback(null, keys.routingKey.toBuffer()) + return keys.routingKey.toBuffer() } /** * Get pubsub subscriptions related to ipns. 
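 * (Editor's note, not part of the patch: after this change the method is
 * synchronous and simply returns the topic list, so a caller - assuming a
 * `datastore` variable holding an IpnsPubsubDatastore instance - would do
 * `const topics = datastore.getSubscriptions()` with no callback.)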
* @param {function(Error, Object)} callback - * @returns {void} + * @returns {Array} */ - getSubscriptions (callback) { + getSubscriptions () { const subscriptions = Object.values(this._subscriptions).filter(Boolean) - return callback(null, subscriptions.map((sub) => `${ipns.namespace}${sub}`)) + return subscriptions.map((sub) => `${ipns.namespace}${sub}`) } /** @@ -100,9 +111,9 @@ class IpnsPubsubDatastore { * @param {function(Error, Object)} callback * @returns {void} */ - cancel (name, callback) { + async cancel (name) { // eslint-disable-line require-await if (typeof name !== 'string') { - return callback(errcode(new Error('invalid subscription name'), 'ERR_INVALID_SUBSCRIPTION_NAME')) + throw errcode(new Error('invalid subscription name'), 'ERR_INVALID_SUBSCRIPTION_NAME') } // Trim /ipns/ prefix from the name @@ -114,26 +125,22 @@ class IpnsPubsubDatastore { // Not found topic if (!stringifiedTopic) { - return callback(null, { + return { canceled: false - }) + } } // Unsubscribe topic - try { - const bufTopic = Buffer.from(stringifiedTopic) + const bufTopic = Buffer.from(stringifiedTopic) - this._pubsubDs.unsubscribe(bufTopic) - } catch (err) { - return callback(err) - } + this._pubsubDs.unsubscribe(bufTopic) this._subscriptions[stringifiedTopic] = undefined log(`unsubscribed pubsub ${stringifiedTopic}: ${name}`) - callback(null, { + return { canceled: true - }) + } } } diff --git a/src/core/mfs-preload.js b/src/core/mfs-preload.js index dcf783ea76..9527b68d64 100644 --- a/src/core/mfs-preload.js +++ b/src/core/mfs-preload.js @@ -1,7 +1,6 @@ 'use strict' const debug = require('debug') -const setImmediate = require('async/setImmediate') const log = debug('ipfs:mfs-preload') log.error = debug('ipfs:mfs-preload:error') @@ -12,8 +11,8 @@ module.exports = (self) => { if (!options.enabled) { log('MFS preload disabled') return { - start: (cb) => setImmediate(cb), - stop: (cb) => setImmediate(cb) + start: async () => {}, + stop: async () => {} } } @@ -21,39 +20,34 @@ module.exports = (self) => { let timeoutId const preloadMfs = () => { - self.files.stat('/', (err, stats) => { - if (err) { - timeoutId = setTimeout(preloadMfs, options.interval) - return log.error('failed to stat MFS root for preload', err) - } - - if (rootCid !== stats.hash) { - log(`preloading updated MFS root ${rootCid} -> ${stats.hash}`) - - return self._preload(stats.hash, (err) => { - timeoutId = setTimeout(preloadMfs, options.interval) - if (err) return log.error(`failed to preload MFS root ${stats.hash}`, err) - rootCid = stats.hash - }) - } + self.files.stat('/') + .then((stats) => { + if (rootCid !== stats.hash) { + log(`preloading updated MFS root ${rootCid} -> ${stats.hash}`) + + return self._preload(stats.hash, (err) => { + timeoutId = setTimeout(preloadMfs, options.interval) + if (err) return log.error(`failed to preload MFS root ${stats.hash}`, err) + rootCid = stats.hash + }) + } - timeoutId = setTimeout(preloadMfs, options.interval) - }) + timeoutId = setTimeout(preloadMfs, options.interval) + }, (err) => { + timeoutId = setTimeout(preloadMfs, options.interval) + log.error('failed to stat MFS root for preload', err) + }) } return { - start (cb) { - self.files.stat('/', (err, stats) => { - if (err) return cb(err) - rootCid = stats.hash - log(`monitoring MFS root ${rootCid}`) - timeoutId = setTimeout(preloadMfs, options.interval) - cb() - }) + async start () { + const stats = await self.files.stat('/') + rootCid = stats.hash + log(`monitoring MFS root ${rootCid}`) + timeoutId = setTimeout(preloadMfs, 
options.interval) }, - stop (cb) { + stop () { clearTimeout(timeoutId) - cb() } } } diff --git a/src/core/runtime/add-from-fs-browser.js b/src/core/runtime/add-from-fs-browser.js index 10f9884a03..aaf9691c7c 100644 --- a/src/core/runtime/add-from-fs-browser.js +++ b/src/core/runtime/add-from-fs-browser.js @@ -3,7 +3,7 @@ const callbackify = require('callbackify') module.exports = () => { - return callbackify(() => { + return callbackify(async () => { // eslint-disable-line require-await throw new Error('not available in the browser') }) } diff --git a/src/core/runtime/add-from-fs-nodejs.js b/src/core/runtime/add-from-fs-nodejs.js index 9370020752..33bc3954e2 100644 --- a/src/core/runtime/add-from-fs-nodejs.js +++ b/src/core/runtime/add-from-fs-nodejs.js @@ -5,7 +5,7 @@ const globSource = require('ipfs-utils/src/files/glob-source') const all = require('async-iterator-all') module.exports = self => { - return callbackify.variadic((...args) => { + return callbackify.variadic(async (...args) => { // eslint-disable-line require-await const options = typeof args[args.length - 1] === 'string' ? {} : args.pop() return all(self._addAsyncIterator(globSource(...args, options), options)) diff --git a/src/core/runtime/dns-browser.js b/src/core/runtime/dns-browser.js index dd2bdba338..9715f7577b 100644 --- a/src/core/runtime/dns-browser.js +++ b/src/core/runtime/dns-browser.js @@ -4,7 +4,6 @@ const TLRU = require('../../utils/tlru') const { default: PQueue } = require('p-queue') const { default: ky } = require('ky-universal') -const nodeify = require('promise-nodeify') // Avoid sending multiple queries for the same hostname by caching results const cache = new TLRU(1000) @@ -37,12 +36,10 @@ const ipfsPath = (response) => { throw new Error(response.Message) } -module.exports = (fqdn, opts = {}, cb) => { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - const resolveDnslink = async (fqdn, opts = {}) => { +module.exports = async (fqdn, opts) => { // eslint-disable-line require-await + const resolveDnslink = async (fqdn, opts) => { + opts = opts || {} + const searchParams = new URLSearchParams(opts) searchParams.set('arg', fqdn) @@ -58,5 +55,5 @@ module.exports = (fqdn, opts = {}, cb) => { return ipfsPath(response) } - return nodeify(resolveDnslink(fqdn, opts), cb) + return resolveDnslink(fqdn, opts) } diff --git a/src/core/runtime/dns-nodejs.js b/src/core/runtime/dns-nodejs.js index 62c006d69c..60f514dd2b 100644 --- a/src/core/runtime/dns-nodejs.js +++ b/src/core/runtime/dns-nodejs.js @@ -1,13 +1,14 @@ 'use strict' const dns = require('dns') -const _ = require('lodash') +const flatten = require('lodash.flatten') const isIPFS = require('is-ipfs') const errcode = require('err-code') +const promisify = require('promisify-es6') const MAX_RECURSIVE_DEPTH = 32 -module.exports = (domain, opts, callback) => { +module.exports = (domain, opts) => { // recursive is true by default, it's set to false only if explicitly passed as argument in opts const recursive = opts.recursive == null ? 
true : Boolean(opts.recursive) @@ -16,62 +17,58 @@ module.exports = (domain, opts, callback) => { depth = MAX_RECURSIVE_DEPTH } - return recursiveResolveDnslink(domain, depth, callback) + return recursiveResolveDnslink(domain, depth) } -function recursiveResolveDnslink (domain, depth, callback) { +async function recursiveResolveDnslink (domain, depth) { if (depth === 0) { - return callback(errcode(new Error('recursion limit exceeded'), 'ERR_DNSLINK_RECURSION_LIMIT')) + throw errcode(new Error('recursion limit exceeded'), 'ERR_DNSLINK_RECURSION_LIMIT') } - return resolveDnslink(domain) - .catch(err => { - // If the code is not ENOTFOUND or ERR_DNSLINK_NOT_FOUND or ENODATA then throw the error - if (err.code !== 'ENOTFOUND' && err.code !== 'ERR_DNSLINK_NOT_FOUND' && err.code !== 'ENODATA') throw err + let dnslinkRecord - if (domain.startsWith('_dnslink.')) { - // The supplied domain contains a _dnslink component - // Check the non-_dnslink domain - const rootDomain = domain.replace('_dnslink.', '') - return resolveDnslink(rootDomain) - } + try { + dnslinkRecord = await resolveDnslink(domain) + } catch (err) { + // If the code is not ENOTFOUND or ERR_DNSLINK_NOT_FOUND or ENODATA then throw the error + if (err.code !== 'ENOTFOUND' && err.code !== 'ERR_DNSLINK_NOT_FOUND' && err.code !== 'ENODATA') { + throw err + } + + if (domain.startsWith('_dnslink.')) { + // The supplied domain contains a _dnslink component + // Check the non-_dnslink domain + dnslinkRecord = await resolveDnslink(domain.replace('_dnslink.', '')) + } else { // Check the _dnslink subdomain const _dnslinkDomain = `_dnslink.${domain}` // If this throws then we propagate the error - return resolveDnslink(_dnslinkDomain) - }) - .then(dnslinkRecord => { - const result = dnslinkRecord.replace('dnslink=', '') - const domainOrCID = result.split('/')[2] - const isIPFSCID = isIPFS.cid(domainOrCID) + dnslinkRecord = await resolveDnslink(_dnslinkDomain) + } + } + + const result = dnslinkRecord.replace('dnslink=', '') + const domainOrCID = result.split('/')[2] + const isIPFSCID = isIPFS.cid(domainOrCID) + + if (isIPFSCID || !depth) { + return result + } - if (isIPFSCID || !depth) { - return callback(null, result) - } - return recursiveResolveDnslink(domainOrCID, depth - 1, callback) - }) - .catch(callback) + return recursiveResolveDnslink(domainOrCID, depth - 1) } -function resolveDnslink (domain) { +async function resolveDnslink (domain) { const DNSLINK_REGEX = /^dnslink=.+$/ - return new Promise((resolve, reject) => { - dns.resolveTxt(domain, (err, records) => { - if (err) return reject(err) - resolve(records) - }) - }) - .then(records => { - return _.chain(records).flatten().filter(record => { - return DNSLINK_REGEX.test(record) - }).value() - }) - .then(dnslinkRecords => { - // we now have dns text entries as an array of strings - // only records passing the DNSLINK_REGEX text are included - if (dnslinkRecords.length === 0) { - throw errcode(new Error(`No dnslink records found for domain: ${domain}`), 'ERR_DNSLINK_NOT_FOUND') - } - return dnslinkRecords[0] - }) + const records = await promisify(dns.resolveTxt)(domain) + const dnslinkRecords = flatten(records) + .filter(record => DNSLINK_REGEX.test(record)) + + // we now have dns text entries as an array of strings + // only records passing the DNSLINK_REGEX text are included + if (dnslinkRecords.length === 0) { + throw errcode(new Error(`No dnslink records found for domain: ${domain}`), 'ERR_DNSLINK_NOT_FOUND') + } + + return dnslinkRecords[0] } diff --git 
a/src/core/runtime/ipld-browser-all.js b/src/core/runtime/ipld-browser-all.js index be618364fa..5bc9b486fa 100644 --- a/src/core/runtime/ipld-browser-all.js +++ b/src/core/runtime/ipld-browser-all.js @@ -1,7 +1,9 @@ 'use strict' const mergeOptions = require('merge-options') -module.exports = (blockService, options = {}) => { +module.exports = (blockService, options) => { + options = options || {} + return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, diff --git a/src/core/runtime/ipld-browser.js b/src/core/runtime/ipld-browser.js index 31c19c141a..4d47639bbf 100644 --- a/src/core/runtime/ipld-browser.js +++ b/src/core/runtime/ipld-browser.js @@ -4,7 +4,9 @@ const ipldDagCbor = require('ipld-dag-cbor') const ipldDagPb = require('ipld-dag-pb') const ipldRaw = require('ipld-raw') -module.exports = (blockService, options = {}) => { +module.exports = (blockService, options) => { + options = options || {} + return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, diff --git a/src/core/runtime/ipld-nodejs.js b/src/core/runtime/ipld-nodejs.js index 0a26ac1c75..2431973c8b 100644 --- a/src/core/runtime/ipld-nodejs.js +++ b/src/core/runtime/ipld-nodejs.js @@ -36,7 +36,9 @@ const IpldFormats = { } } -module.exports = (blockService, options = {}, log) => { +module.exports = (blockService, options, log) => { + options = options || {} + return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, diff --git a/src/core/runtime/preload-nodejs.js b/src/core/runtime/preload-nodejs.js index edbfc7ff30..aafecc45c2 100644 --- a/src/core/runtime/preload-nodejs.js +++ b/src/core/runtime/preload-nodejs.js @@ -9,7 +9,7 @@ const setImmediate = require('async/setImmediate') const log = debug('ipfs:preload') log.error = debug('ipfs:preload:error') -module.exports = function preload (url, callback) { +module.exports = function preload (url, callback = () => {}) { log(url) try { diff --git a/src/core/utils.js b/src/core/utils.js index 98760b0338..8373797dde 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,7 +1,5 @@ 'use strict' -const promisify = require('promisify-es6') -const map = require('async/map') const isIpfs = require('is-ipfs') const CID = require('cids') @@ -73,67 +71,58 @@ const normalizePath = (pathStr) => { * - Arrays of the above * * @param {IPFS} objectAPI The IPFS object api - * @param {Described above} ipfsPaths A single or collection of ipfs-paths - * @param {Function} callback res is Array - * if no callback is passed, returns a Promise - * @return {Promise|void} + * @param {?} ipfsPaths A single or collection of ipfs-paths + * @return {Promise<Array<CID>>} */ -const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) { +const resolvePath = async function (objectAPI, ipfsPaths) { if (!Array.isArray(ipfsPaths)) { ipfsPaths = [ipfsPaths] } - map(ipfsPaths, (path, cb) => { - if (typeof path !== 'string') { - let cid + const cids = [] - try { - cid = new CID(path) - } catch (err) { - return cb(err) - } + for (const path of ipfsPaths) { + if (typeof path !== 'string') { + cids.push(new CID(path)) - return cb(null, cid.buffer) + continue } - let parsedPath - try { - parsedPath = exports.parseIpfsPath(path) - } catch (err) { - return cb(err) - } + const parsedPath = exports.parseIpfsPath(path) + let hash = new CID(parsedPath.hash) + let links = parsedPath.links - const rootHash
= new CID(parsedPath.hash) - const rootLinks = parsedPath.links + if (!links.length) { + cids.push(hash) - if (!rootLinks.length) { - return cb(null, rootHash.buffer) + continue } - objectAPI.get(rootHash, follow.bind(null, rootHash, rootLinks)) - // recursively follow named links to the target node - function follow (cid, links, err, obj) { - if (err) { - return cb(err) - } + while (true) { + const obj = await objectAPI.get(hash) if (!links.length) { // done tracing, obj is the target node - return cb(null, cid.buffer) + cids.push(hash) + + break } const linkName = links[0] const nextObj = obj.Links.find(link => link.Name === linkName) if (!nextObj) { - return cb(new Error(`no link named "${linkName}" under ${cid}`)) + throw new Error(`no link named "${linkName}" under ${hash}`) } - objectAPI.get(nextObj.Hash, follow.bind(null, nextObj.Hash, links.slice(1))) + hash = nextObj.Hash + links = links.slice(1) } - }, callback) -}) + } + + return cids +} exports.normalizePath = normalizePath exports.parseIpfsPath = parseIpfsPath diff --git a/src/http/api/resources/bitswap.js b/src/http/api/resources/bitswap.js index 3b853f6d99..0a8d9debf1 100644 --- a/src/http/api/resources/bitswap.js +++ b/src/http/api/resources/bitswap.js @@ -8,7 +8,7 @@ const { parseKey } = require('./block') exports.wantlist = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -30,7 +30,7 @@ exports.wantlist = { exports.stat = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -61,7 +61,7 @@ exports.stat = { exports.unwant = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/block.js b/src/http/api/resources/block.js index 38bfd666ab..d72f4b78bc 100644 --- a/src/http/api/resources/block.js +++ b/src/http/api/resources/block.js @@ -51,7 +51,7 @@ exports.get = { exports.put = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -122,7 +122,7 @@ exports.rm = { exports.stat = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/dag.js b/src/http/api/resources/dag.js index f22fd929c3..436382bc38 100644 --- a/src/http/api/resources/dag.js +++ b/src/http/api/resources/dag.js @@ -80,8 +80,8 @@ const encodeBufferKeys = (obj, encoding) => { exports.get = { validate: { query: Joi.object().keys({ - 'data-encoding': Joi.string().valid(['text', 'base64', 'hex']).default('text'), - 'cid-base': Joi.string().valid(multibase.names) + 'data-encoding': Joi.string().valid('text', 'base64', 'hex').default('text'), + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -132,8 +132,8 @@ exports.put = { format: Joi.string().default('cbor'), 'input-enc': Joi.string().default('json'), pin: Joi.boolean(), - hash: Joi.string().valid(Object.keys(mh.names)).default('sha2-256'), - 'cid-base': Joi.string().valid(multibase.names) + hash: Joi.string().valid(...Object.keys(mh.names)).default('sha2-256'), + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -229,7 +229,7 @@ exports.put = { exports.resolve = { 
validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index 3749a88999..ea711cfc18 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -156,7 +156,7 @@ exports.add = { query: Joi.object() .keys({ 'cid-version': Joi.number().integer().min(0).max(1).default(0), - 'cid-base': Joi.string().valid(multibase.names), + 'cid-base': Joi.string().valid(...multibase.names), 'raw-leaves': Joi.boolean(), 'only-hash': Joi.boolean(), pin: Joi.boolean().default(true), @@ -264,7 +264,7 @@ exports.add = { exports.ls = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/object.js b/src/http/api/resources/object.js index 51e585f6d0..6d1bd660ee 100644 --- a/src/http/api/resources/object.js +++ b/src/http/api/resources/object.js @@ -30,7 +30,7 @@ exports.parseKey = (request, h) => { exports.new = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -68,7 +68,7 @@ exports.new = { exports.get = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -115,7 +115,7 @@ exports.get = { exports.put = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -197,7 +197,7 @@ exports.put = { exports.stat = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -245,7 +245,7 @@ exports.data = { exports.links = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -313,7 +313,7 @@ exports.parseKeyAndData = async (request, h) => { exports.patchAppendData = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -355,7 +355,7 @@ exports.patchAppendData = { exports.patchSetData = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -393,7 +393,7 @@ exports.patchSetData = { exports.patchAddLink = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -464,7 +464,7 @@ exports.patchAddLink = { exports.patchRmLink = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js index 182ba93987..576d9be88d 100644 --- a/src/http/api/resources/pin.js +++ b/src/http/api/resources/pin.js @@ -28,7 +28,7 @@ function parseArgs (request, h) { exports.ls = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -73,7 +73,7 @@ 
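
[Editor's note, not part of the patch: the recurring `valid(multibase.names)` to `valid(...multibase.names)` change across these HTTP resources tracks Joi's newer API, in which any.valid() takes the allowed values as individual arguments; an array argument is treated as a single literal value rather than a list. A short sketch of the difference, assuming Joi v16+ (published at the time as @hapi/joi):

  const Joi = require('@hapi/joi')

  // spread: each name is an allowed value
  const ok = Joi.string().valid(...['base32', 'base58btc'])
  console.log(ok.validate('base32').error) // undefined - accepted

  // array: only the array itself would match, never the string 'base32'
  const wrong = Joi.string().valid(['base32', 'base58btc'])
  console.log(wrong.validate('base32').error) // ValidationError - rejected
]
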
exports.ls = { exports.add = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, @@ -102,7 +102,7 @@ exports.add = { exports.rm = { validate: { query: Joi.object().keys({ - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, diff --git a/src/http/api/resources/resolve.js b/src/http/api/resources/resolve.js index 906e0f0e9d..58bcbc4385 100644 --- a/src/http/api/resources/resolve.js +++ b/src/http/api/resources/resolve.js @@ -12,7 +12,7 @@ module.exports = { query: Joi.object().keys({ recursive: Joi.boolean().default(true), arg: Joi.string().required(), - 'cid-base': Joi.string().valid(multibase.names) + 'cid-base': Joi.string().valid(...multibase.names) }).unknown() }, async handler (request, h) { diff --git a/src/utils/mutex.js b/src/utils/mutex.js index 2ff7f5e1ae..8cb3df36cc 100644 --- a/src/utils/mutex.js +++ b/src/utils/mutex.js @@ -6,7 +6,9 @@ const noop = () => {} // Wrap mortice to present a callback interface class Mutex { - constructor (repoOwner, options = {}) { + constructor (repoOwner, options) { + options = options || {} + this.mutex = mortice(options.morticeId, { singleProcess: repoOwner }) @@ -15,33 +17,11 @@ class Mutex { this.lockId = 0 } - readLock (lockedFn, cb) { - if (lockedFn && cb) { - this._lock('readLock').then(release => { - lockedFn((err, res) => { - release() - - cb(err, res) - }) - }, cb) - return - } - + readLock () { return this._lock('readLock') } - writeLock (lockedFn, cb) { - if (lockedFn && cb) { - this._lock('writeLock').then(release => { - lockedFn((err, res) => { - release() - - cb(err, res) - }) - }, cb) - return - } - + writeLock () { return this._lock('writeLock') } diff --git a/test/bootstrapers.js b/test/bootstrapers.js index 6ba8879390..e79244ca18 100644 --- a/test/bootstrapers.js +++ b/test/bootstrapers.js @@ -16,7 +16,11 @@ describe('Check that a js-ipfs node can indeed contact the bootstrappers', () => before(async () => { this.timeout(30 * 1000) - const factory = IPFSFactory.create({ type: 'proc', exec: IPFS }) + const factory = IPFSFactory.create({ + type: 'proc', + exec: IPFS, + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ config: { diff --git a/test/cli/bitswap.js b/test/cli/bitswap.js index 138e107354..24bc6371fb 100644 --- a/test/cli/bitswap.js +++ b/test/cli/bitswap.js @@ -23,8 +23,6 @@ describe('bitswap', () => runOn((thing) => { }) before(function (done) { - //this.timeout(60 * 1000) - PeerId.create({ bits: 512 }, (err, peer) => { expect(err).to.not.exist() peerId = peer.toB58String() @@ -33,8 +31,6 @@ describe('bitswap', () => runOn((thing) => { }) before(function (done) { - //this.timeout(2 * 60 * 1000) - const test = (cb) => { ipfs('bitswap wantlist') .then(out => cb(null, out.includes(key0) && out.includes(key1))) @@ -48,7 +44,6 @@ describe('bitswap', () => runOn((thing) => { }) it('wantlist', function () { - //this.timeout(20 * 1000) return ipfs('bitswap wantlist').then((out) => { expect(out).to.include(key0) expect(out).to.include(key1) diff --git a/test/cli/daemon.js b/test/cli/daemon.js index 1ec3379fc9..5f222ceaeb 100644 --- a/test/cli/daemon.js +++ b/test/cli/daemon.js @@ -26,7 +26,13 @@ const daemonReady = (daemon, cb) => { daemon.cancel() } }) - daemon.stderr.on('data', () => reject(new Error('Daemon didnt start'))) + daemon.stderr.on('data', (data) => { + const line = data.toString('utf8') + + if 
(!line.includes('ExperimentalWarning')) { + reject(new Error('Daemon didn\'t start ' + data.toString('utf8'))) + } + }) daemon.then(() => resolve(r)).catch(err => { if (r && err.killed) { return resolve(r) diff --git a/test/cli/dht.js b/test/cli/dht.js index 9c8e3c72ca..b4655bb6d4 100644 --- a/test/cli/dht.js +++ b/test/cli/dht.js @@ -10,7 +10,10 @@ chai.use(dirtyChai) const parallel = require('async/parallel') const path = require('path') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'js' }) +const df = DaemonFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') +}) const ipfsExec = require('../utils/ipfs-exec') diff --git a/test/cli/name-pubsub.js b/test/cli/name-pubsub.js index 1c665ea492..de54ca5f33 100644 --- a/test/cli/name-pubsub.js +++ b/test/cli/name-pubsub.js @@ -11,7 +11,10 @@ const parallel = require('async/parallel') const ipfsExec = require('../utils/ipfs-exec') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'js' }) +const df = DaemonFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') +}) const spawnDaemon = () => df.spawn({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), @@ -97,16 +100,14 @@ describe('name-pubsub', () => { it('should subscribe on name resolve', function () { this.timeout(80 * 1000) - return ipfsB(`name resolve ${nodeAId.id}`) - .catch((err) => { - expect(err).to.exist() // Not available (subscribed) - + return ipfsB.fail(`name resolve ${nodeAId.id}`) + .then((err) => { + expect(err.all).to.include('was not found') return ipfsB('pubsub ls') }) .then((res) => { expect(res).to.exist() expect(res).to.have.string('/record/') // have a record ipns subscribtion - return ipfsB('name pubsub subs') }) .then((res) => { diff --git a/test/cli/ping.js b/test/cli/ping.js index 6ea6f4ce09..9eff4b094d 100644 --- a/test/cli/ping.js +++ b/test/cli/ping.js @@ -7,7 +7,10 @@ const dirtyChai = require('dirty-chai') const DaemonFactory = require('ipfsd-ctl') const ipfsExec = require('../utils/ipfs-exec') const path = require('path') -const df = DaemonFactory.create({ type: 'js' }) +const df = DaemonFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') +}) const expect = chai.expect chai.use(dirtyChai) diff --git a/test/cli/pubsub.js b/test/cli/pubsub.js index 8bcb709b0a..017b665ce1 100644 --- a/test/cli/pubsub.js +++ b/test/cli/pubsub.js @@ -39,7 +39,10 @@ describe('pubsub', function () { before(async function () { this.timeout(60 * 1000) - const df = DaemonFactory.create({ type: 'proc' }) + const df = DaemonFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsdA = await df.spawn({ exec: IPFS, initOptions: { bits: 512 }, @@ -55,7 +58,10 @@ describe('pubsub', function () { }) before(async () => { - const df = DaemonFactory.create({ type: 'js' }) + const df = DaemonFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') + }) ipfsdB = await df.spawn({ initOptions: { bits: 512 }, exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), diff --git a/test/cli/swarm.js b/test/cli/swarm.js index c8c0206e3a..9a99cf1acc 100644 --- a/test/cli/swarm.js +++ b/test/cli/swarm.js @@ -16,7 +16,10 @@ const PeerInfo = require('peer-info') const PeerId = require('peer-id') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'js' }) +const df = DaemonFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') +}) const config = { Bootstrap: [], diff --git 
a/test/core/bitswap.spec.js b/test/core/bitswap.spec.js index 526be9093b..eaa368b7a2 100644 --- a/test/core/bitswap.spec.js +++ b/test/core/bitswap.spec.js @@ -19,13 +19,14 @@ const CID = require('cids') const path = require('path') const IPFSFactory = require('ipfsd-ctl') const callbackify = require('callbackify') +const IPFSHTTPClient = require('ipfs-http-client') const IPFS = require('../../src/core') function makeBlock (callback) { const d = Buffer.from(`IPFS is awesome ${hat()}`) - multihashing(d, 'sha2-256', (err, multihash) => { + callbackify(multihashing)(d, 'sha2-256', null, (err, multihash) => { if (err) { return callback(err) } @@ -83,11 +84,13 @@ function addNode (fDaemon, inProcNode, callback) { }, (err, ipfsd) => { expect(err).to.not.exist() nodes.push(ipfsd) - connectNodes(ipfsd.api, inProcNode, (err) => callback(err, ipfsd.api)) + connectNodes(ipfsd.api, inProcNode, (err) => { + callback(err, ipfsd.api) + }) }) } -describe.only('bitswap', function () { +describe('bitswap', function () { this.timeout(80 * 1000) let inProcNode // Node spawned inside this process @@ -95,8 +98,14 @@ describe.only('bitswap', function () { let fInProc before(function () { - fDaemon = IPFSFactory.create({ type: 'js' }) - fInProc = IPFSFactory.create({ type: 'proc' }) + fDaemon = IPFSFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') + }) + fInProc = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) }) beforeEach(async function () { @@ -124,8 +133,11 @@ describe.only('bitswap', function () { const ipfsd = await fInProc.spawn({ exec: IPFS, + IPFSClient: IPFSHTTPClient, config: config, - initOptions: { bits: 512 } + initOptions: { bits: 512 }, + start: true, + init: true }) nodes.push(ipfsd) inProcNode = ipfsd.api @@ -133,7 +145,9 @@ describe.only('bitswap', function () { afterEach(async function () { this.timeout(80 * 1000) - await Promise.all(nodes.map((node) => node.stop())) + await Promise.all( + nodes.map((node) => node.stop()) + ) nodes = [] }) diff --git a/test/core/block.spec.js b/test/core/block.spec.js index 959fae2983..c4c54bc3b3 100644 --- a/test/core/block.spec.js +++ b/test/core/block.spec.js @@ -15,9 +15,10 @@ describe('block', () => { let ipfsd, ipfs before(async function () { - //this.timeout(20 * 1000) - - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/bootstrap.spec.js b/test/core/bootstrap.spec.js index 0081958d95..e8928b0e9b 100644 --- a/test/core/bootstrap.spec.js +++ b/test/core/bootstrap.spec.js @@ -10,7 +10,10 @@ const isNode = require('detect-node') const IPFS = require('../../src') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'proc' }) +const df = DaemonFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') +}) describe('bootstrap', () => { if (!isNode) { diff --git a/test/core/circuit-relay.js b/test/core/circuit-relay.js index 6d58176910..628544e520 100644 --- a/test/core/circuit-relay.js +++ b/test/core/circuit-relay.js @@ -13,7 +13,11 @@ const crypto = require('crypto') const IPFS = require('../../src') const DaemonFactory = require('ipfsd-ctl') -const procDf = DaemonFactory.create({ type: 'proc', exec: IPFS }) +const procDf = DaemonFactory.create({ + type: 'proc', + exec: IPFS, + IpfsClient: require('ipfs-http-client') +}) const baseConf = { Bootstrap: [], diff --git 
a/test/core/dag.spec.js b/test/core/dag.spec.js index 3fcbe11f5a..b5dc5b1fbf 100644 --- a/test/core/dag.spec.js +++ b/test/core/dag.spec.js @@ -15,7 +15,10 @@ describe('dag', function () { let ipfsd, ipfs before(async () => { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/dht.spec.js b/test/core/dht.spec.js index d698d1168e..5ce711473a 100644 --- a/test/core/dht.spec.js +++ b/test/core/dht.spec.js @@ -20,7 +20,10 @@ describe.skip('dht', () => { before(async function () { this.timeout(30 * 1000) - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/files-sharding.spec.js b/test/core/files-sharding.spec.js index 4c9c53c073..6abcd86823 100644 --- a/test/core/files-sharding.spec.js +++ b/test/core/files-sharding.spec.js @@ -11,7 +11,10 @@ const pull = require('pull-stream') const IPFS = require('../../src/core') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'proc' }) +const df = DaemonFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') +}) describe('files directory (sharding tests)', () => { function createTestFiles () { diff --git a/test/core/files.spec.js b/test/core/files.spec.js index 3e3af57e53..5c59e0d4db 100644 --- a/test/core/files.spec.js +++ b/test/core/files.spec.js @@ -16,7 +16,10 @@ describe('files', function () { let ipfsd, ipfs before(async () => { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/gc-lock.spec.js b/test/core/gc-lock.spec.js index d44255b1bd..3effa1325e 100644 --- a/test/core/gc-lock.spec.js +++ b/test/core/gc-lock.spec.js @@ -6,6 +6,7 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) +const delay = require('delay') const parallel = require('async/parallel') const pull = require('pull-stream') const pullThrough = require('pull-stream/throughs/through') @@ -14,38 +15,56 @@ const pullCollect = require('pull-stream/sinks/collect') const pullValues = require('pull-stream/sources/values') const GCLock = require('../../src/core/components/pin/gc-lock') -const cbTakeLock = (type, lock, out, id, duration) => { - return (cb) => lock[type + 'Lock']((lockCb) => { - out.push(`${type} ${id} start`) - setTimeout(() => { - out.push(`${type} ${id} end`) - lockCb() - }, duration) - }, cb) +const promiseTakeLock = (type, lock, out, id, duration) => { + return (cb) => { + lock[type + 'Lock']() + .then(async (release) => { + try { + out.push(`${type} ${id} start`) + + await delay(duration) + + out.push(`${type} ${id} end`) + } finally { + release() + } + }) + .then(() => cb()) + } } -const cbReadLock = (lock, out, id, duration) => { - return cbTakeLock('read', lock, out, id, duration) +const promiseReadLock = (lock, out, id, duration) => { + return promiseTakeLock('read', lock, out, id, duration) } -const cbWriteLock = (lock, out, id, duration) => { - return cbTakeLock('write', lock, out, id, duration) +const promiseWriteLock = (lock, out, id, duration) => { + return promiseTakeLock('write', lock, out, id, duration) } -const cbTakeLockError = (type, lock, out, errs, id, 
duration) => { - return (cb) => lock[type + 'Lock']((lockCb) => { - out.push(`${type} ${id} start`) - setTimeout(() => { - out.push(`${type} ${id} error`) - lockCb(new Error('err')) - }, duration) - }, (err) => { - errs.push(err) - cb() - }) +const promiseTakeLockError = (type, lock, out, errs, id, duration) => { + return (cb) => { + lock[type + 'Lock']() + .then(async (release) => { + try { + out.push(`${type} ${id} start`) + + await delay(duration) + + out.push(`${type} ${id} error`) + + const err = new Error('err') + errs.push(err) + + throw err + } finally { + release() + } + }) + .catch(() => cb()) + } } -const cbReadLockError = (lock, out, errs, id, duration) => { - return cbTakeLockError('read', lock, out, errs, id, duration) +const promiseReadLockError = (lock, out, errs, id, duration) => { + return promiseTakeLockError('read', lock, out, errs, id, duration) } -const cbWriteLockError = (lock, out, errs, id, duration) => { - return cbTakeLockError('write', lock, out, errs, id, duration) +const promiseWriteLockError = (lock, out, errs, id, duration) => { + return promiseTakeLockError('write', lock, out, errs, id, duration) } const pullTakeLock = (type, lock, out, id, duration) => { @@ -285,11 +304,11 @@ const runTests = (suiteName, { readLock, writeLock, readLockError, writeLockErro } describe('gc-lock', function () { - runTests('cb style lock', { - readLock: cbReadLock, - writeLock: cbWriteLock, - readLockError: cbReadLockError, - writeLockError: cbWriteLockError + runTests('promise style lock', { + readLock: promiseReadLock, + writeLock: promiseWriteLock, + readLockError: promiseReadLockError, + writeLockError: promiseWriteLockError }) runTests('pull stream style lock', { diff --git a/test/core/gc.spec.js b/test/core/gc.spec.js index 3fc007d30a..b6309b4fd2 100644 --- a/test/core/gc.spec.js +++ b/test/core/gc.spec.js @@ -23,14 +23,20 @@ class MutexEmitter extends Mutex { this.emitter = new EventEmitter() } - readLock (lockedFn, cb) { - this.emitter.emit('readLock request') - return super.readLock(lockedFn, cb) + readLock () { + setTimeout(() => { + this.emitter.emit('readLock request') + }, 100) + + return super.readLock() } - writeLock (lockedFn, cb) { - this.emitter.emit('writeLock request') - return super.writeLock(lockedFn, cb) + writeLock () { + setTimeout(() => { + this.emitter.emit('writeLock request') + }, 100) + + return super.writeLock() } } @@ -55,7 +61,11 @@ describe('gc', function () { let lockEmitter before(async function () { - const factory = IPFSFactory.create({ type: 'proc', exec: IPFS }) + const factory = IPFSFactory.create({ + type: 'proc', + exec: IPFS, + IpfsClient: require('ipfs-http-client') + }) const config = { Bootstrap: [] } if (env.isNode) { diff --git a/test/core/init.spec.js b/test/core/init.spec.js index 706de3d6b6..5a9c893839 100644 --- a/test/core/init.spec.js +++ b/test/core/init.spec.js @@ -34,48 +34,32 @@ describe('init', () => { afterEach((done) => repo.teardown(done)) - it('basic', (done) => { - ipfs.init({ bits: 512, pass: hat() }, (err) => { - expect(err).to.not.exist() + it('basic', async () => { + await ipfs.init({ bits: 512, pass: hat() }) - repo.exists((err, res) => { - expect(err).to.not.exist() - expect(res).to.equal(true) - - repo.config.get((err, config) => { - expect(err).to.not.exist() - expect(config.Identity).to.exist() - expect(config.Keychain).to.exist() - done() - }) - }) - }) + const res = await repo.exists() + expect(res).to.equal(true) + + const config = await repo.config.get() + + expect(config.Identity).to.exist() + 
expect(config.Keychain).to.exist() }) - it('set # of bits in key', function (done) { + it('set # of bits in key', async function () { this.timeout(40 * 1000) - ipfs.init({ bits: 1024, pass: hat() }, (err) => { - expect(err).to.not.exist() + await ipfs.init({ bits: 1024, pass: hat() }) - repo.config.get((err, config) => { - expect(err).to.not.exist() - expect(config.Identity.PrivKey.length).is.above(256) - done() - }) - }) + const config = await repo.config.get() + expect(config.Identity.PrivKey.length).is.above(256) }) - it('pregenerated key is being used', (done) => { - ipfs.init({ privateKey }, (err) => { - expect(err).to.not.exist() + it('pregenerated key is being used', async () => { + await ipfs.init({ privateKey }) - repo.config.get((err, config) => { - expect(err).to.not.exist() - expect(config.Identity.PeerID).is.equal('QmRsooYQasV5f5r834NSpdUtmejdQcpxXkK6qsozZWEihC') - done() - }) - }) + const config = await repo.config.get() + expect(config.Identity.PeerID).is.equal('QmRsooYQasV5f5r834NSpdUtmejdQcpxXkK6qsozZWEihC') }) it('init docs are written', (done) => { diff --git a/test/core/kad-dht.node.js b/test/core/kad-dht.node.js index ad88dac668..032df5289a 100644 --- a/test/core/kad-dht.node.js +++ b/test/core/kad-dht.node.js @@ -10,7 +10,10 @@ const path = require('path') const parallel = require('async/parallel') const IPFSFactory = require('ipfsd-ctl') -const f = IPFSFactory.create({ type: 'js' }) +const f = IPFSFactory.create({ + type: 'js', + IpfsClient: require('ipfs-http-client') +}) const config = { Bootstrap: [], diff --git a/test/core/mfs-preload.spec.js b/test/core/mfs-preload.spec.js index 8f432da8ea..61de724f8a 100644 --- a/test/core/mfs-preload.spec.js +++ b/test/core/mfs-preload.spec.js @@ -7,17 +7,21 @@ const dirtyChai = require('dirty-chai') const expect = chai.expect chai.use(dirtyChai) -const waitFor = require('../utils/wait-for') +const delay = require('delay') +const waitFor = require('../utils/wait-for').promises const mfsPreload = require('../../src/core/mfs-preload') const createMockFilesStat = (cids = []) => { let n = 0 - return (path, cb) => cb(null, { hash: cids[n++] || 'QmHash' }) + return () => { + return Promise.resolve({ hash: cids[n++] || 'QmHash' }) + } } const createMockPreload = () => { - const preload = (cid, cb) => { + const preload = (cid, cb = () => {}) => { preload.cids.push(cid) + cb() } preload.cids = [] @@ -47,7 +51,7 @@ describe('MFS preload', () => { } }) - it('should preload MFS root periodically', function (done) { + it('should preload MFS root periodically', async function () { this.timeout(80 * 1000) mockIpfs._options.preload.enabled = true @@ -56,36 +60,30 @@ describe('MFS preload', () => { const expectedPreloadCids = ['QmSame', 'QmUpdated'] const preloader = mfsPreload(mockIpfs) - preloader.start((err) => { - expect(err).to.not.exist() + await preloader.start() - const test = (cb) => { - // Slice off any extra CIDs it processed - const cids = mockPreload.cids.slice(0, expectedPreloadCids.length) - if (cids.length !== expectedPreloadCids.length) return cb(null, false) - cb(null, cids.every((cid, i) => cid === expectedPreloadCids[i])) + const test = () => { + // Slice off any extra CIDs it processed + const cids = mockPreload.cids.slice(0, expectedPreloadCids.length) + + if (cids.length !== expectedPreloadCids.length) { + return false } - waitFor(test, { name: 'CIDs to be preloaded' }, (err) => { - expect(err).to.not.exist() - preloader.stop(done) - }) - }) + return cids.every((cid, i) => cid === expectedPreloadCids[i]) + } + + await 
waitFor(test, { name: 'CIDs to be preloaded' }) + await preloader.stop() }) - it('should disable preloading MFS', function (done) { + it('should disable preloading MFS', async () => { mockIpfs._options.preload.enabled = false const preloader = mfsPreload(mockIpfs) - - preloader.start((err) => { - expect(err).to.not.exist() - - setTimeout(() => { - expect(mockPreload.cids).to.be.empty() - - done() - }, 500) - }) + await preloader.start() + await delay(500) + expect(mockPreload.cids).to.be.empty() + await preloader.stop() }) }) diff --git a/test/core/name-pubsub.js b/test/core/name-pubsub.js index 208cc2be1e..8d433441aa 100644 --- a/test/core/name-pubsub.js +++ b/test/core/name-pubsub.js @@ -12,7 +12,7 @@ const base64url = require('base64url') const { fromB58String } = require('multihashes') const retry = require('async/retry') const series = require('async/series') - +const callbackify = require('callbackify') const peerId = require('peer-id') const isNode = require('detect-node') const ipns = require('ipns') @@ -21,7 +21,10 @@ const waitFor = require('../utils/wait-for') const delay = require('interface-ipfs-core/src/utils/delay') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'proc' }) +const df = DaemonFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') +}) const namespace = '/record/' const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' @@ -103,7 +106,7 @@ describe('name-pubsub', function () { } if (!res || !res.length) { - return next(new Error('Could not find subscription')) + return next(new Error(`Could not find subscription for topic ${topic}`)) } return next(null, res) @@ -204,14 +207,13 @@ describe('name-pubsub', function () { const pubKeyPeerId = res[4] expect(pubKeyPeerId.toB58String()).not.to.equal(messageKey.toB58String()) - expect(pubKeyPeerId.toB58String()).to.equal(testAccount.id) expect(publishedMessage.from).to.equal(idA.id) expect(messageKey.toB58String()).to.equal(idA.id) expect(publishedMessageDataValue).to.equal(ipfsRef) // Verify the signature - ipns.validate(pubKeyPeerId._pubKey, publishedMessageData, (err) => { + callbackify(ipns.validate.bind(ipns))(pubKeyPeerId._pubKey, publishedMessageData, (err) => { expect(err).to.not.exist() done() }) diff --git a/test/core/name.spec.js b/test/core/name.spec.js index edef66c500..abbce2379b 100644 --- a/test/core/name.spec.js +++ b/test/core/name.spec.js @@ -20,7 +20,10 @@ const PubsubDatastore = require('../../src/core/ipns/routing/pubsub-datastore') const { Key } = require('interface-datastore') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ type: 'proc' }) +const df = DaemonFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') +}) const ipfsRef = '/ipfs/QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' @@ -210,137 +213,128 @@ describe('name', function () { } }) - it('should error to publish if does not receive private key', function (done) { - node._ipns.publisher.publish(null, ipfsRef, (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_INVALID_PRIVATE_KEY') - done() - }) + it('should error to publish if does not receive private key', function () { + return node._ipns.publisher.publish(null, ipfsRef) + .then(() => expect.fail('should have thrown when private key was missing'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PRIVATE_KEY') + }) }) - it('should error to publish if an invalid private key is received', function (done) { - 
node._ipns.publisher.publish({ bytes: 'not that valid' }, ipfsRef, (err) => { - expect(err).to.exist() - done() - }) + it('should error to publish if an invalid private key is received', function () { + return node._ipns.publisher.publish({ bytes: 'not that valid' }, ipfsRef) + .then(() => expect.fail('should have thrown when private key was invalid'), (err) => { + expect(err).to.exist() + }) }) - it('should error to publish if _updateOrCreateRecord fails', function (done) { + it('should error to publish if _updateOrCreateRecord fails', function () { const stub = sinon.stub(node._ipns.publisher, '_updateOrCreateRecord').callsArgWith(4, 'error') - node.name.publish(ipfsRef, { resolve: false }, (err) => { - expect(err).to.exist() + return node.name.publish(ipfsRef, { resolve: false }) + .then(() => expect.fail('should have thrown when _updateOrCreateRecord fails'), (err) => { + expect(err).to.exist() - stub.restore() - done() - }) + stub.restore() + }) }) - it('should error to publish if _putRecordToRouting receives an invalid peer id', function (done) { - node._ipns.publisher._putRecordToRouting(undefined, undefined, (err) => { - expect(err).to.exist() - done() - }) + it('should error to publish if _putRecordToRouting receives an invalid peer id', function () { + return node._ipns.publisher._putRecordToRouting(undefined, undefined) + .then(() => expect.fail('should have thrown if peer id was invalid'), (err) => { + expect(err).to.exist() + }) }) - it('should error to publish if receives an invalid datastore key', function (done) { + it('should error to publish if receives an invalid datastore key', function () { const stub = sinon.stub(Key, 'isKey').returns(false) - node.name.publish(ipfsRef, { resolve: false }, (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_INVALID_DATASTORE_KEY') + return node.name.publish(ipfsRef, { resolve: false }) + .then(() => expect.fail('should have thrown if datastore key was invalid'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_DATASTORE_KEY') - stub.restore() - done() - }) + stub.restore() + }) }) - it('should error to publish if we receive a unexpected error getting from datastore', function (done) { + it('should error to publish if we receive an unexpected error getting from datastore', function () { const stub = sinon.stub(node._ipns.publisher._datastore, 'get').callsArgWith(1, 'error-unexpected') - node.name.publish(ipfsRef, { resolve: false }, (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_DETERMINING_PUBLISHED_RECORD') + return node.name.publish(ipfsRef, { resolve: false }) + .then(() => expect.fail('should have thrown if an unexpected error was received when getting from the datastore'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_DETERMINING_PUBLISHED_RECORD') - stub.restore() - done() - }) + stub.restore() + }) }) - it('should error to publish if we receive a unexpected error putting to datastore', function (done) { + it('should error to publish if we receive an unexpected error putting to datastore', function () { const stub = sinon.stub(node._ipns.publisher._datastore, 'put').callsArgWith(2, 'error-unexpected') - node.name.publish(ipfsRef, { resolve: false }, (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_STORING_IN_DATASTORE') + return node.name.publish(ipfsRef, { resolve: false }) + .then(() => expect.fail('should have thrown if an unexpected error was received when putting to the datastore'), (err) => { + expect(err).to.exist() 
+ expect(err.code).to.equal('ERR_STORING_IN_DATASTORE') - stub.restore() - done() - }) + stub.restore() + }) }) - it('should error to resolve if the received name is not a string', function (done) { - node._ipns.resolver.resolve(false, (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_INVALID_NAME') - done() - }) + it('should error to resolve if the received name is not a string', function () { + return node._ipns.resolver.resolve(false) + .then(() => expect.fail('should have thrown if the received name is not a string'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_NAME') + }) }) - it('should error to resolve if receives an invalid ipns path', function (done) { - node._ipns.resolver.resolve('ipns/', (err) => { - expect(err).to.exist() - expect(err.code).to.equal('ERR_INVALID_NAME') - done() - }) + it('should error to resolve if receives an invalid ipns path', function () { + return node._ipns.resolver.resolve('ipns/') + .then(() => expect.fail('should have thrown if the IPNS path was invalid'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_NAME') + }) }) - it('should publish and then fail to resolve if receive error getting from datastore', function (done) { + it('should publish and then fail to resolve if receive error getting from datastore', async function () { const stub = sinon.stub(node._ipns.resolver._routing, 'get').callsArgWith(1, 'error-unexpected') - node.name.publish(ipfsRef, { resolve: false }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() + await node.name.publish(ipfsRef, { resolve: false }) - node.name.resolve(nodeId, { nocache: true }, (err) => { + return node.name.resolve(nodeId, { nocache: true }) + .then(() => expect.fail('should have thrown when an invalid response was received from the datastore'), (err) => { expect(err).to.exist() expect(err.code).to.equal('ERR_UNEXPECTED_ERROR_GETTING_RECORD') stub.restore() - done() }) - }) }) - it('should publish and then fail to resolve if does not find the record', function (done) { + it('should publish and then fail to resolve if does not find the record', async function () { const stub = sinon.stub(node._ipns.resolver._routing, 'get').callsArgWith(1, { code: 'ERR_NOT_FOUND' }) - node.name.publish(ipfsRef, { resolve: false }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() + await node.name.publish(ipfsRef, { resolve: false }) - node.name.resolve(nodeId, { nocache: true }, (err) => { + return node.name.resolve(nodeId, { nocache: true }) + .then(() => expect.fail('should have thrown when failing to find the record after publish'), (err) => { expect(err).to.exist() - expect(err.code).to.equal('ERR_NO_RECORD_FOUND') + expect(err.code).to.equal('ERR_UNEXPECTED_ERROR_GETTING_RECORD') stub.restore() - done() }) - }) }) - it('should publish and then fail to resolve if does not receive a buffer', function (done) { + it('should publish and then fail to resolve if does not receive a buffer', async function () { const stub = sinon.stub(node._ipns.resolver._routing, 'get').callsArgWith(1, undefined, 'data') - node.name.publish(ipfsRef, { resolve: false }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() + await node.name.publish(ipfsRef, { resolve: false }) - node.name.resolve(nodeId, { nocache: true }, (err) => { + return node.name.resolve(nodeId, { nocache: true }) + .then(() => expect.fail('should have thrown if a buffer was not received'), (err) => { + expect(err).to.exist() - 
expect(err.code).to.equal('ERR_INVALID_RECORD_RECEIVED') + expect(err.code).to.equal('ERR_UNEXPECTED_ERROR_GETTING_RECORD') stub.restore() - done() }) - }) }) }) @@ -384,34 +378,22 @@ describe('name', function () { } }) - it('should resolve an ipfs path correctly', function (done) { - node.add(fixture, (err, res) => { - expect(err).to.not.exist() + it('should resolve an ipfs path correctly', async function () { + const res = await node.add(fixture) - node.name.publish(`/ipfs/${res[0].hash}`, (err) => { - expect(err).to.not.exist() + await node.name.publish(`/ipfs/${res[0].hash}`) - ipnsPath.resolvePath(node, `/ipfs/${res[0].hash}`, (err, value) => { - expect(err).to.not.exist() - expect(value).to.exist() - done() - }) - }) - }) + const value = await ipnsPath.resolvePath(node, `/ipfs/${res[0].hash}`) + + expect(value).to.exist() }) - it('should resolve an ipns path correctly', function (done) { - node.add(fixture, (err, res) => { - expect(err).to.not.exist() - node.name.publish(`/ipfs/${res[0].hash}`, (err) => { - expect(err).to.not.exist() - ipnsPath.resolvePath(node, `/ipns/${nodeId}`, (err, value) => { - expect(err).to.not.exist() - expect(value).to.exist() - done() - }) - }) - }) + it('should resolve an ipns path correctly', async function () { + const res = await node.add(fixture) + await node.name.publish(`/ipfs/${res[0].hash}`) + const value = await ipnsPath.resolvePath(node, `/ipns/${nodeId}`) + + expect(value).to.exist() }) }) diff --git a/test/core/object.spec.js b/test/core/object.spec.js index a1655b8646..789b32af8a 100644 --- a/test/core/object.spec.js +++ b/test/core/object.spec.js @@ -17,7 +17,10 @@ describe('object', function () { let ipfsd, ipfs before(async function () { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/pin-set.js b/test/core/pin-set.js index 9bad46ce7b..cf1b0263bf 100644 --- a/test/core/pin-set.js +++ b/test/core/pin-set.js @@ -17,7 +17,7 @@ const { DAGNode } = require('ipld-dag-pb') const CID = require('cids') - +const callbackify = require('callbackify') const IPFS = require('../../src/core') const createPinSet = require('../../src/core/components/pin/pin-set') const createTempRepo = require('../utils/create-repo-nodejs') @@ -87,7 +87,14 @@ describe('pinSet', function () { preload: { enabled: false } }) ipfs.on('ready', () => { - pinSet = createPinSet(ipfs.dag) + const ps = createPinSet(ipfs.dag) + pinSet = { + storeSet: callbackify(ps.storeSet.bind(ps)), + loadSet: callbackify(ps.loadSet.bind(ps)), + hasDescendant: callbackify(ps.hasDescendant.bind(ps)), + walkItems: callbackify(ps.walkItems.bind(ps)), + getInternalCids: callbackify(ps.getInternalCids.bind(ps)) + } done() }) }) @@ -140,7 +147,7 @@ describe('pinSet', function () { const hashes = loaded.map(l => new CID(l).toBaseEncodedString()) // just check the first node, assume all are children if successful - pinSet.hasDescendant(result.node, hashes[0], (err, has) => { + pinSet.hasDescendant(result.cid, hashes[0], (err, has) => { expect(err).to.not.exist() expect(has).to.eql(true) done() diff --git a/test/core/pin.js b/test/core/pin.js index b789024b24..fd67c25936 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -9,6 +9,9 @@ chai.use(dirtyChai) const fs = require('fs') +const { + DAGNode +} = require('ipld-dag-pb') const CID = require('cids') const IPFS = require('../../src/core') const createTempRepo = 
require('../utils/create-repo-nodejs') @@ -51,7 +54,7 @@ describe('pin', function () { type = pinTypes.all } - return pin._isPinnedWithType(hash, type) + return pin.pinManager.isPinnedWithType(hash, type) .then(result => { expect(result.pinned).to.eql(pinned) if (type === pinTypes.indirect) { @@ -108,13 +111,13 @@ describe('pin', function () { it('when node is pinned', function () { return pin.add(pins.solarWiki) - .then(() => pin._isPinnedWithType(pins.solarWiki, pinTypes.all)) + .then(() => pin.pinManager.isPinnedWithType(pins.solarWiki, pinTypes.all)) .then(pinned => expect(pinned.pinned).to.eql(true)) }) it('when node is not in datastore', function () { const falseHash = `${pins.root.slice(0, -2)}ss` - return pin._isPinnedWithType(falseHash, pinTypes.all) + return pin.pinManager.isPinnedWithType(falseHash, pinTypes.all) .then(pinned => { expect(pinned.pinned).to.eql(false) expect(pinned.reason).to.eql(undefined) @@ -127,7 +130,7 @@ describe('pin', function () { }) it('when pinned recursively', function () { - return pin._isPinnedWithType(pins.root, pinTypes.recursive) + return pin.pinManager.isPinnedWithType(pins.root, pinTypes.recursive) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pinTypes.recursive) @@ -135,7 +138,7 @@ describe('pin', function () { }) it('when pinned indirectly', function () { - return pin._isPinnedWithType(pins.mercuryWiki, pinTypes.indirect) + return pin.pinManager.isPinnedWithType(pins.mercuryWiki, pinTypes.indirect) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason.toBaseEncodedString()).to.eql(pins.root) @@ -145,7 +148,7 @@ describe('pin', function () { it('when pinned directly', function () { return pin.add(pins.mercuryDir, { recursive: false }) .then(() => { - return pin._isPinnedWithType(pins.mercuryDir, pinTypes.direct) + return pin.pinManager.isPinnedWithType(pins.mercuryDir, pinTypes.direct) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pinTypes.direct) @@ -155,7 +158,7 @@ describe('pin', function () { it('when not pinned', function () { return clearPins() - .then(() => pin._isPinnedWithType(pins.mercuryDir, pinTypes.direct)) + .then(() => pin.pinManager.isPinnedWithType(pins.mercuryDir, pinTypes.direct)) .then(pin => expect(pin.pinned).to.eql(false)) }) }) @@ -435,9 +438,71 @@ describe('pin', function () { return clearPins() .then(() => pin.add(pins.mercuryWiki)) }) - .then(() => pin._load()) + .then(() => pin.pinManager.load()) .then(() => pin.ls()) .then(ls => expect(ls.length).to.eql(1)) }) }) + + describe('non-dag-pb nodes', function () { + it('pins dag-cbor', async () => { + const cid = await ipfs.dag.put({}, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }) + + await pin.add(cid) + + const pins = await pin.ls() + + expect(pins).to.deep.include({ + type: 'recursive', + hash: cid.toString() + }) + }) + + it('pins raw', async () => { + const cid = await ipfs.dag.put(Buffer.alloc(0), { + format: 'raw', + hashAlg: 'sha2-256' + }) + + await pin.add(cid) + + const pins = await pin.ls() + + expect(pins).to.deep.include({ + type: 'recursive', + hash: cid.toString() + }) + }) + + it('pins dag-cbor with dag-pb child', async () => { + const child = await ipfs.dag.put(new DAGNode(Buffer.alloc(0)), { + format: 'dag-pb', + hashAlg: 'sha2-256' + }) + const parent = await ipfs.dag.put({ + child + }, { + format: 'dag-cbor', + hashAlg: 'sha2-256' + }) + + await pin.add(parent, { + recursive: true + }) + + const pins = await pin.ls() + + expect(pins).to.deep.include({ + 
hash: parent.toString(), + type: 'recursive' + }) + expect(pins).to.deep.include({ + hash: child.toString(), + type: 'indirect' + }) + }) + }) }) diff --git a/test/core/pin.spec.js b/test/core/pin.spec.js index e9b88fa090..467593dc03 100644 --- a/test/core/pin.spec.js +++ b/test/core/pin.spec.js @@ -15,7 +15,10 @@ describe('pin', function () { let ipfsd, ipfs before(async () => { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/ping.spec.js b/test/core/ping.spec.js index f05b97d556..05163a6b5e 100644 --- a/test/core/ping.spec.js +++ b/test/core/ping.spec.js @@ -12,11 +12,13 @@ const path = require('path') const expect = chai.expect chai.use(dirtyChai) const df = DaemonFactory.create({ - exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') }) const dfProc = DaemonFactory.create({ exec: require('../../'), - type: 'proc' + type: 'proc', + IpfsClient: require('ipfs-http-client') }) const config = { diff --git a/test/core/stats.spec.js b/test/core/stats.spec.js index 680d8a0e23..3771a0b2ce 100644 --- a/test/core/stats.spec.js +++ b/test/core/stats.spec.js @@ -16,7 +16,10 @@ describe('stats', function () { let ipfsd, ipfs before(async () => { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/swarm.spec.js b/test/core/swarm.spec.js index c037092a1b..7002ec3edd 100644 --- a/test/core/swarm.spec.js +++ b/test/core/swarm.spec.js @@ -15,7 +15,10 @@ describe('swarm', function () { let ipfsd, ipfs before(async () => { - const factory = IPFSFactory.create({ type: 'proc' }) + const factory = IPFSFactory.create({ + type: 'proc', + IpfsClient: require('ipfs-http-client') + }) ipfsd = await factory.spawn({ exec: IPFS, diff --git a/test/core/utils.js b/test/core/utils.js index bf9bb4060a..e0c620608a 100644 --- a/test/core/utils.js +++ b/test/core/utils.js @@ -133,60 +133,50 @@ describe('utils', () => { after(done => repo.teardown(done)) - it('handles base58 hash format', (done) => { - utils.resolvePath(node.object, rootHash, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(rootMultihash) - done() - }) + it('handles base58 hash format', async () => { + const hashes = await utils.resolvePath(node.object, rootHash) + + expect(hashes.length).to.equal(1) + expect(hashes[0].buffer).to.deep.equal(rootMultihash) }) - it('handles multihash format', (done) => { - utils.resolvePath(node.object, aboutMultihash, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(aboutMultihash) - done() - }) + it('handles multihash format', async () => { + const hashes = await utils.resolvePath(node.object, aboutMultihash) + + expect(hashes.length).to.equal(1) + expect(hashes[0].buffer).to.deep.equal(aboutMultihash) }) - it('handles ipfs paths format', function (done) { + it('handles ipfs paths format', async function () { this.timeout(200 * 1000) - utils.resolvePath(node.object, aboutPath, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(aboutMultihash) - done() - }) + const hashes = await 
utils.resolvePath(node.object, aboutPath) + + expect(hashes.length).to.equal(1) + expect(hashes[0].buffer).to.deep.equal(aboutMultihash) }) - it('handles an array', (done) => { + it('handles an array', async () => { const paths = [rootHash, rootPath, rootMultihash] - utils.resolvePath(node.object, paths, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(3) - expect(hashes[0]).to.deep.equal(rootMultihash) - expect(hashes[1]).to.deep.equal(rootMultihash) - expect(hashes[2]).to.deep.equal(rootMultihash) - done() - }) + const hashes = await utils.resolvePath(node.object, paths) + + expect(hashes.length).to.equal(3) + expect(hashes[0].buffer).to.deep.equal(rootMultihash) + expect(hashes[1].buffer).to.deep.equal(rootMultihash) + expect(hashes[2].buffer).to.deep.equal(rootMultihash) }) - it('should error on invalid hashes', function (done) { - utils.resolvePath(node.object, '/ipfs/asdlkjahsdfkjahsdfd', err => { - expect(err).to.exist() - done() - }) + it('should error on invalid hashes', () => { + return utils.resolvePath(node.object, '/ipfs/asdlkjahsdfkjahsdfd') + .then(() => expect.fail('should have errored'), (err) => expect(err).to.exist()) }) - it('should error when a link doesn\'t exist', function (done) { - utils.resolvePath(node.object, `${aboutPath}/fusion`, err => { - expect(err.message).to.include( - 'no link named "fusion" under QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' - ) - done() - }) + it('should error when a link doesn\'t exist', () => { + return utils.resolvePath(node.object, `${aboutPath}/fusion`) + .then(() => expect.fail('should have errored'), (err) => { + expect(err.message).to.include( + 'no link named "fusion" under QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' + ) + }) }) }) }) diff --git a/test/http-api/block.js b/test/http-api/block.js index f2242a6cd6..8fc06f8d44 100644 --- a/test/http-api/block.js +++ b/test/http-api/block.js @@ -10,7 +10,10 @@ const multihash = require('multihashes') const waterfall = require('async/waterfall') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) describe('block endpoint', () => { let ipfs = null diff --git a/test/http-api/bootstrap.js b/test/http-api/bootstrap.js index 07d675b67c..d227496d5d 100644 --- a/test/http-api/bootstrap.js +++ b/test/http-api/bootstrap.js @@ -8,7 +8,10 @@ const expect = chai.expect chai.use(dirtyChai) const path = require('path') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) describe('bootstrap endpoint', () => { let ipfs = null diff --git a/test/http-api/config.js b/test/http-api/config.js index bed3701419..a27439825f 100644 --- a/test/http-api/config.js +++ b/test/http-api/config.js @@ -17,7 +17,10 @@ const fs = require('fs') const path = require('path') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) skipOnWindows('config endpoint', () => { const repoExample = path.join(__dirname, 
'../fixtures/go-ipfs-repo') diff --git a/test/http-api/dns.js b/test/http-api/dns.js index 60d35afc15..2003a838c0 100644 --- a/test/http-api/dns.js +++ b/test/http-api/dns.js @@ -7,7 +7,10 @@ const expect = chai.expect chai.use(dirtyChai) const path = require('path') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) describe('dns endpoint', () => { let ipfs = null diff --git a/test/http-api/files.js b/test/http-api/files.js index 0fc29fa5ee..fc68c07266 100644 --- a/test/http-api/files.js +++ b/test/http-api/files.js @@ -12,7 +12,10 @@ const { FILE_TYPES } = require('ipfs-mfs') const path = require('path') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) describe('.files', () => { let ipfs = null diff --git a/test/http-api/id.js b/test/http-api/id.js index 88e44f9ed9..9804b6b343 100644 --- a/test/http-api/id.js +++ b/test/http-api/id.js @@ -15,7 +15,10 @@ const isWindows = require('../utils/platforms').isWindows const skipOnWindows = isWindows() ? describe.skip : describe const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create({ exec: path.resolve(`${__dirname}/../../src/cli/bin.js`) }) +const df = DaemonFactory.create({ + exec: path.resolve(`${__dirname}/../../src/cli/bin.js`), + IpfsClient: require('ipfs-http-client') +}) skipOnWindows('id endpoint', () => { const repoExample = path.join(__dirname, '../fixtures/go-ipfs-repo') diff --git a/test/node.js b/test/node.js index 97b2eeaaef..840700960b 100644 --- a/test/node.js +++ b/test/node.js @@ -1,6 +1,6 @@ 'use strict' require('./cli') -//require('./http-api') -//require('./gateway') -//require('./core/node.js') +require('./http-api') +require('./gateway') +require('./core/node.js') diff --git a/test/utils/create-repo-browser.js b/test/utils/create-repo-browser.js index 18b220b7bd..db7ef23914 100644 --- a/test/utils/create-repo-browser.js +++ b/test/utils/create-repo-browser.js @@ -3,6 +3,7 @@ const IPFSRepo = require('ipfs-repo') const hat = require('hat') +const callbackify = require('callbackify') const idb = self.indexedDB || self.mozIndexedDB || @@ -14,13 +15,18 @@ function createTempRepo (repoPath) { const repo = new IPFSRepo(repoPath) - repo.teardown = (done) => { - repo.close(() => { - idb.deleteDatabase(repoPath) - idb.deleteDatabase(repoPath + '/blocks') - done() - }) - } + repo.teardown = callbackify(async () => { + try { + await repo.close() + } catch (err) { + if (!err.message.includes('already closed')) { + throw err + } + } + + idb.deleteDatabase(repoPath) + idb.deleteDatabase(repoPath + '/blocks') + }) return repo } diff --git a/test/utils/create-repo-nodejs.js b/test/utils/create-repo-nodejs.js index 365823e824..1699c48166 100644 --- a/test/utils/create-repo-nodejs.js +++ b/test/utils/create-repo-nodejs.js @@ -5,18 +5,24 @@ const clean = require('./clean') const os = require('os') const path = require('path') const hat = require('hat') -const series = require('async/series') +const callbackify = require('callbackify') function createTempRepo (repoPath) { repoPath = repoPath || path.join(os.tmpdir(), '/ipfs-test-' + hat()) const repo = new IPFSRepo(repoPath) - repo.teardown = async (done) => { - await 
repo.close() + repo.teardown = callbackify(async () => { + try { + await repo.close() + } catch (err) { + if (!err.message.includes('already closed')) { + throw err + } + } + await clean(repoPath) - done() - } + }) return repo } diff --git a/test/utils/interface-common-factory.js b/test/utils/interface-common-factory.js index 0b2d55fe75..b0a245fb0b 100644 --- a/test/utils/interface-common-factory.js +++ b/test/utils/interface-common-factory.js @@ -33,8 +33,7 @@ function createFactory (options) { } const ipfsFactory = IPFSFactory.create(options.factoryOptions) - const callbackifiedSpawn = callbackify.variadic( - ipfsFactory.spawn.bind(ipfsFactory)) + const callbackifiedSpawn = callbackify.variadic(ipfsFactory.spawn.bind(ipfsFactory)) return function createCommon () { const nodes = [] diff --git a/test/utils/ipfs-exec.js b/test/utils/ipfs-exec.js index 94630a18e4..0714526e96 100644 --- a/test/utils/ipfs-exec.js +++ b/test/utils/ipfs-exec.js @@ -3,7 +3,6 @@ const execa = require('execa') const chai = require('chai') const dirtyChai = require('dirty-chai') -const expect = chai.expect chai.use(dirtyChai) const path = require('path') const _ = require('lodash') @@ -73,25 +72,17 @@ module.exports = (repoPath, opts) => { */ ipfs.fail = function ipfsFail () { let args = Array.from(arguments) - let caught = false + if (args.length === 1) { args = args[0].split(' ') } return exec(args) - .catch(err => { - caught = true - expect(err).to.exist() - + .then(() => { + throw new Error(`jsipfs expected to fail during command: jsipfs ${args.join(' ')}`) + }, (err) => { return err }) - .then((res) => { - if (!caught) { - throw new Error(`jsipfs expected to fail during command: jsipfs ${args.join(' ')}`) - } - - return res - }) } return ipfs diff --git a/test/utils/on-and-off.js b/test/utils/on-and-off.js index dc29ed5488..d262876920 100644 --- a/test/utils/on-and-off.js +++ b/test/utils/on-and-off.js @@ -8,7 +8,9 @@ const clean = require('../utils/clean') const os = require('os') const DaemonFactory = require('ipfsd-ctl') -const df = DaemonFactory.create() +const df = DaemonFactory.create({ + IpfsClient: require('ipfs-http-client') +}) const path = require('path') function off (tests) { @@ -23,12 +25,13 @@ function off (tests) { repoPath = os.tmpdir() + '/ipfs-' + hat() thing.ipfs = ipfsExec(repoPath) thing.ipfs.repoPath = repoPath + return thing.ipfs('init') }) - after(function () { + after(async function () { this.timeout(20 * 1000) - return clean(repoPath) + await clean(repoPath) }) tests(thing)