diff --git a/package.json b/package.json index e7658600..d1c93f42 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,6 @@ }, "homepage": "https://github.com/ipfs/interface-ipfs-core#readme", "dependencies": { - "async": "^2.6.2", "bl": "^3.0.0", "bs58": "^4.0.1", "callbackify": "^1.1.0", @@ -47,6 +46,7 @@ "delay": "^4.3.0", "dirty-chai": "^2.0.1", "es6-promisify": "^6.0.1", + "get-stream": "^5.1.0", "hat": "0.0.3", "ipfs-block": "~0.8.0", "ipfs-unixfs": "~0.1.16", @@ -61,9 +61,15 @@ "multibase": "~0.6.0", "multihashes": "~0.4.14", "multihashing-async": "~0.6.0", - "peer-id": "~0.12.0", + "p-each-series": "^2.1.0", + "p-map-series": "^2.1.0", + "p-timeout": "^3.2.0", + "p-times": "^2.1.0", + "p-whilst": "^2.1.0", + "peer-id": "~0.13.5", "peer-info": "~0.15.0", "pull-stream": "^3.6.11", + "pull-to-promise": "^1.0.1", "pump": "^3.0.0", "readable-stream": "^3.1.1", "streaming-iterables": "^4.1.0", diff --git a/src/bitswap/stat.js b/src/bitswap/stat.js index 02461247..30b5570f 100644 --- a/src/bitswap/stat.js +++ b/src/bitswap/stat.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const waterfall = require('async/waterfall') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsBitswap } = require('../stats/utils') @@ -10,53 +9,26 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.bitswap.stat', () => { + describe('.bitswap.stat', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get bitswap stats', (done) => { - ipfs.bitswap.stat((err, res) => { - expectIsBitswap(err, res) - done() - }) + it('should get bitswap stats', async () => { + const res = await ipfs.bitswap.stat() + expectIsBitswap(null, res) }) - it('should get bitswap stats (promised)', () => { - return ipfs.bitswap.stat().then((res) => { - expectIsBitswap(null, res) - }) - }) + it('should not get bitswap stats when offline', async () => { + const node = await createCommon().setup() + await node.stop() - it('should not get bitswap stats when offline', function (done) { - this.timeout(60 * 1000) - - waterfall([ - (cb) => createCommon().setup(cb), - (factory, cb) => factory.spawnNode(cb), - (node, cb) => node.stop((err) => cb(err, node)) - ], (err, node) => { - expect(err).to.not.exist() - node.bitswap.wantlist((err) => { - expect(err).to.exist() - done() - }) - }) + return expect(node.bitswap.stat()).to.eventually.be.rejected() }) }) } diff --git a/src/bitswap/utils.js b/src/bitswap/utils.js index c5f2de34..d2b0cd11 100644 --- a/src/bitswap/utils.js +++ b/src/bitswap/utils.js @@ -1,35 +1,24 @@ 'use strict' -const until = require('async/until') +const pWhilst = require('p-whilst') -function waitForWantlistKey (ipfs, key, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - opts = opts || {} - opts.timeout = opts.timeout || 1000 +function waitForWantlistKey (ipfs, key, opts = {}) { + opts.timeout = opts.timeout || 10000 let list = { Keys: [] } - let timedOut = false - setTimeout(() => { timedOut = true }, opts.timeout) + const start = Date.now() 
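+  // pWhilst re-runs the iteratee while this condition returns true, i.e. while the key is still missing from the wantlist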
+ const findKey = () => !list.Keys.some(k => k['/'] === key) + + const iteratee = async () => { + if (Date.now() - start > opts.timeout) { + throw new Error(`Timed out waiting for ${key} in wantlist`) + } - const test = () => timedOut ? true : list.Keys.some(k => k['/'] === key) - const iteratee = (cb) => { - ipfs.bitswap.wantlist(opts.peerId, (err, nextList) => { - if (err) return cb(err) - list = nextList - cb() - }) + list = await ipfs.bitswap.wantlist(opts.peerId) } - until(test, iteratee, (err) => { - if (err) return cb(err) - if (timedOut) return cb(new Error(`Timed out waiting for ${key} in wantlist`)) - cb() - }) + return pWhilst(findKey, iteratee) } module.exports.waitForWantlistKey = waitForWantlistKey diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js index cdc11c57..34480a90 100644 --- a/src/bitswap/wantlist.js +++ b/src/bitswap/wantlist.js @@ -2,74 +2,45 @@ /* eslint max-nested-callbacks: ["error", 6] */ 'use strict' -const waterfall = require('async/waterfall') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { waitForWantlistKey } = require('./utils') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.bitswap.wantlist', () => { + describe('.bitswap.wantlist', function () { + this.timeout(60 * 1000) let ipfsA let ipfsB const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR' - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() - common.setup((err, factory) => { - expect(err).to.not.exist() + // Add key to the wantlist for ipfsB + ipfsB.block.get(key).catch(() => {}) - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - ipfsA = nodes[0] - ipfsB = nodes[1] - - // Add key to the wantlist for ipfsB - ipfsB.block.get(key, () => {}) - - connect(ipfsA, ipfsB.peerId.addresses[0], done) - }) - }) + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after(function (done) { - this.timeout(30 * 1000) - common.teardown(done) - }) + after(() => common.teardown()) - it('should get the wantlist', (done) => { - waitForWantlistKey(ipfsB, key, done) + it('should get the wantlist', () => { + return waitForWantlistKey(ipfsB, key) }) - it('should get the wantlist by peer ID for a diffreent node', (done) => { - ipfsB.id((err, info) => { - expect(err).to.not.exist() - waitForWantlistKey(ipfsA, key, { peerId: info.id }, done) - }) + it('should get the wantlist by peer ID for a different node', () => { + return waitForWantlistKey(ipfsA, key, { peerId: ipfsB.peerId.id }) }) - it('should not get the wantlist when offline', function (done) { - this.timeout(60 * 1000) + it('should not get the wantlist when offline', async () => { + const node = await createCommon().setup() + await node.stop() - waterfall([ - (cb) => createCommon().setup(cb), - (factory, cb) => factory.spawnNode(cb), - (node, cb) => node.stop((err) => cb(err, node)) - ], (err, node) => { - expect(err).to.not.exist() - node.bitswap.wantlist((err) => { - expect(err).to.exist() - done() - }) - }) + return expect(node.bitswap.wantlist()).to.eventually.be.rejected() }) }) } diff --git a/src/block/get.js b/src/block/get.js index 0cac5825..38e2a659 100644 --- a/src/block/get.js +++
b/src/block/get.js @@ -3,7 +3,6 @@ const multihash = require('multihashes') const CID = require('cids') -const auto = require('async/auto') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -12,108 +11,78 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.block.get', function () { + this.timeout(60 * 1000) const data = Buffer.from('blorb') let ipfs, hash - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - auto({ - factory: (cb) => common.setup(cb), - ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)], - block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)] - }, (err, res) => { - if (err) return done(err) - ipfs = res.ipfs - hash = res.block.cid.multihash - done() - }) + before(async () => { + ipfs = await common.setup() + const block = await ipfs.block.put(data) + hash = block.cid.multihash }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get by CID object', (done) => { + it('should get by CID object', async () => { const cid = new CID(hash) + const block = await ipfs.block.get(cid) - ipfs.block.get(cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(cid.multihash) - done() - }) + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(cid.multihash) }) - it('should get by CID in string', (done) => { - ipfs.block.get(multihash.toB58String(hash), (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(hash) - done() - }) + it('should get by CID in string', async () => { + const block = await ipfs.block.get(multihash.toB58String(hash)) + + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(hash) }) - it('should get an empty block', (done) => { - ipfs.block.put(Buffer.alloc(0), { + it('should get an empty block', async () => { + const res = await ipfs.block.put(Buffer.alloc(0), { format: 'dag-pb', mhtype: 'sha2-256', version: 0 - }, (err, block) => { - expect(err).to.not.exist() - - ipfs.block.get(block.cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.alloc(0)) - done() - }) }) + + const block = await ipfs.block.get(res.cid) + + expect(block.data).to.eql(Buffer.alloc(0)) }) - it('should get a block added as CIDv0 with a CIDv1', done => { + it('should get a block added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(input, { version: 0 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.block.put(input, { version: 0 }) - const cidv0 = res.cid - expect(cidv0.version).to.equal(0) + const cidv0 = res.cid + expect(cidv0.version).to.equal(0) - const cidv1 = cidv0.toV1() + const cidv1 = cidv0.toV1() - ipfs.block.get(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output.data).to.eql(input) - done() - }) - }) + const block = await ipfs.block.get(cidv1) + expect(block.data).to.eql(input) }) - it('should get a block added as CIDv1 with a CIDv0', done => { + it('should get a block added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(input, { version: 1 }, (err, res) => { - expect(err).to.not.exist() + const res = await 
ipfs.block.put(input, { version: 1 }) - const cidv1 = res.cid - expect(cidv1.version).to.equal(1) + const cidv1 = res.cid + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.block.get(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output.data).to.eql(input) - done() - }) - }) + const block = await ipfs.block.get(cidv0) + expect(block.data).to.eql(input) }) it('should return an error for an invalid CID', () => { - return ipfs.block.get('invalid') - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.get('invalid')).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('message') + .that.include('Non-base58 character') }) }) } diff --git a/src/block/put.js b/src/block/put.js index 4100e366..1df2edc9 100644 --- a/src/block/put.js +++ b/src/block/put.js @@ -11,88 +11,68 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.block.put', () => { + describe('.block.put', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should put a buffer, using defaults', (done) => { + it('should put a buffer, using defaults', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const blob = Buffer.from('blorb') - ipfs.block.put(blob, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(blob) + + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should put a buffer, using CID', (done) => { + it('should put a buffer, using CID', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(expectedHash) const blob = Buffer.from('blorb') - ipfs.block.put(blob, { cid: cid }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(blob, { cid: cid }) + + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should put a buffer, using options', (done) => { + it('should put a buffer, using options', async () => { const blob = Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(blob, { + const block = await ipfs.block.put(blob, { format: 'raw', mhtype: 'sha2-512', version: 1 - }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.version).to.equal(1) - expect(block.cid.codec).to.equal('raw') - expect(multihash.decode(block.cid.multihash).name).to.equal('sha2-512') - done() }) + + expect(block.data).to.be.eql(blob) + expect(block.cid.version).to.equal(1) + expect(block.cid.codec).to.equal('raw') + 
expect(multihash.decode(block.cid.multihash).name).to.equal('sha2-512') }) - it('should put a Block instance', (done) => { + it('should put a Block instance', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(expectedHash) const b = new Block(Buffer.from('blorb'), cid) - ipfs.block.put(b, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(b) + + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should error with array of blocks', (done) => { + it('should error with array of blocks', () => { const blob = Buffer.from('blorb') - ipfs.block.put([blob, blob], (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + return expect(ipfs.block.put([blob, blob])).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) }) } diff --git a/src/block/rm.js b/src/block/rm.js index 0cca8036..8f99fa23 100644 --- a/src/block/rm.js +++ b/src/block/rm.js @@ -10,24 +10,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.block.rm', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should remove by CID object', async () => { const cid = await ipfs.dag.put(Buffer.from(hat()), { diff --git a/src/block/stat.js b/src/block/stat.js index d5bf3b11..0d1a5976 100644 --- a/src/block/stat.js +++ b/src/block/stat.js @@ -2,7 +2,6 @@ 'use strict' const CID = require('cids') -const auto = require('async/auto') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -10,54 +9,36 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.block.stat', () => { + describe('.block.stat', function () { + this.timeout(60 * 1000) const data = Buffer.from('blorb') let ipfs, hash - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - auto({ - factory: (cb) => common.setup(cb), - ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)], - block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)] - }, (err, res) => { - if (err) return done(err) - ipfs = res.ipfs - hash = res.block.cid.multihash - done() - }) + before(async () => { + ipfs = await common.setup() + const block = await ipfs.block.put(data) + hash = block.cid.multihash }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should stat by CID', (done) => { + it('should stat by CID', async () => { const cid = new CID(hash) - ipfs.block.stat(cid, (err, stats) => { - expect(err).to.not.exist() - expect(stats).to.have.property('key') - expect(stats).to.have.property('size') - done() - }) + const stats = await ipfs.block.stat(cid) + + expect(stats).to.have.property('key') + expect(stats).to.have.property('size') }) 
it('should return error for missing argument', () => { - return ipfs.block.stat(null) - .then( - () => expect.fail('should have thrown for missing parameter'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.stat(null)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) it('should return error for invalid argument', () => { - return ipfs.block.stat('invalid') - .then( - () => expect.fail('should have thrown for invalid parameter'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.stat('invalid')).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) }) } diff --git a/src/bootstrap/add.js b/src/bootstrap/add.js index 61c5ff56..eb803466 100644 --- a/src/bootstrap/add.js +++ b/src/bootstrap/add.js @@ -16,49 +16,30 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should return an error when called with an invalid arg', (done) => { - ipfs.bootstrap.add(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + it('should return an error when called with an invalid arg', () => { + return expect(ipfs.bootstrap.add(invalidArg)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) - it('should return a list containing the bootstrap peer when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.add(validIp4, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.eql({ Peers: [validIp4] }) - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(1) - done() - }) + it('should return a list containing the bootstrap peer when called with a valid arg (ip4)', async () => { + const res = await ipfs.bootstrap.add(validIp4) + + expect(res).to.be.eql({ Peers: [validIp4] }) + const peers = res.Peers + expect(peers).to.have.property('length').that.is.equal(1) }) - it('should return a list of bootstrap peers when called with the default option', (done) => { - ipfs.bootstrap.add(null, { default: true }, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.above(1) - done() - }) + it('should return a list of bootstrap peers when called with the default option', async () => { + const res = await ipfs.bootstrap.add(null, { default: true }) + + const peers = res.Peers + expect(peers).to.have.property('length').that.is.gt(1) }) it('should prevent duplicate inserts of bootstrap peers', async () => { diff --git a/src/bootstrap/list.js b/src/bootstrap/list.js index 31bfe3f6..ffb75c23 100644 --- a/src/bootstrap/list.js +++ b/src/bootstrap/list.js @@ -13,30 +13,17 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + 
after(() => common.teardown()) - it('should return a list of peers', (done) => { - ipfs.bootstrap.list((err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - done() - }) + it('should return a list of peers', async () => { + const res = await ipfs.bootstrap.list() + + const peers = res.Peers + expect(peers).to.exist() }) }) } diff --git a/src/bootstrap/rm.js b/src/bootstrap/rm.js index 24a77b6c..172cf014 100644 --- a/src/bootstrap/rm.js +++ b/src/bootstrap/rm.js @@ -9,63 +9,50 @@ module.exports = (createCommon, options) => { const common = createCommon() const invalidArg = 'this/Is/So/Invalid/' + const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' describe('.bootstrap.rm', function () { this.timeout(100 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should return an error when called with an invalid arg', (done) => { - ipfs.bootstrap.rm(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + it('should return an error when called with an invalid arg', () => { + return expect(ipfs.bootstrap.rm(invalidArg)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) - it('should return an empty list because no peers removed when called without an arg or options', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - done() - }) + it('should return an empty list because no peers removed when called without an arg or options', async () => { + const res = await ipfs.bootstrap.rm(null) + + const peers = res.Peers + expect(peers).to.have.property('length').that.is.equal(0) }) - it('should return a list containing the peer removed when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - done() - }) + it('should return a list containing the peer removed when called with a valid arg (ip4)', async () => { + const addRes = await ipfs.bootstrap.add(validIp4) + expect(addRes).to.be.eql({ Peers: [validIp4] }) + + const rmRes = await ipfs.bootstrap.rm(validIp4) + expect(rmRes).to.be.eql({ Peers: [validIp4] }) + + const peers = rmRes.Peers + expect(peers).to.have.property('length').that.is.equal(1) }) - it('should return a list of all peers removed when all option is passed', (done) => { - ipfs.bootstrap.rm(null, { all: true }, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - done() - }) + it('should return a list of all peers removed when all option is passed', async () => { + const addRes = await ipfs.bootstrap.add(null, { default: true }) + const addedPeers = addRes.Peers + + const rmRes = await ipfs.bootstrap.rm(null, { all: true }) + const removedPeers = rmRes.Peers + + expect(removedPeers).to.eql(addedPeers) }) }) } diff --git a/src/config/get.js b/src/config/get.js index bedb5001..199257e3 100644 --- a/src/config/get.js +++ b/src/config/get.js @@ 
-10,71 +10,38 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.get', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should retrieve the whole config', (done) => { - ipfs.config.get((err, config) => { - expect(err).to.not.exist() - expect(config).to.be.an('object') - expect(isPlainObject(config)).to.equal(true) - done() - }) - }) + it('should retrieve the whole config', async () => { + const config = await ipfs.config.get() - it('should retrieve the whole config (promised)', () => { - return ipfs.config.get() - .then((config) => { - expect(config).to.be.an('object') - expect(isPlainObject(config)).to.equal(true) - }) + expect(config).to.be.an('object') + expect(isPlainObject(config)).to.equal(true) }) - it('should retrieve a value through a key', (done) => { - ipfs.config.get('Identity.PeerID', (err, peerId) => { - expect(err).to.not.exist() - expect(peerId).to.exist() - done() - }) + it('should retrieve a value through a key', async () => { + const peerId = await ipfs.config.get('Identity.PeerID') + expect(peerId).to.exist() }) - it('should retrieve a value through a nested key', (done) => { - ipfs.config.get('Addresses.Swarm', (err, swarmAddrs) => { - expect(err).to.not.exist() - expect(swarmAddrs).to.exist() - done() - }) + it('should retrieve a value through a nested key', async () => { + const swarmAddrs = await ipfs.config.get('Addresses.Swarm') + expect(swarmAddrs).to.exist() }) - it('should fail on non valid key', (done) => { - ipfs.config.get(1234, (err, peerId) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid key', () => { + return expect(ipfs.config.get(1234)).to.eventually.be.rejected() }) - it('should fail on non existent key', (done) => { - ipfs.config.get('Bananas', (err, peerId) => { - expect(err).to.exist() - done() - }) + it('should fail on non existent key', () => { + return expect(ipfs.config.get('Bananas')).to.eventually.be.rejected() }) }) } diff --git a/src/config/profiles/apply.js b/src/config/profiles/apply.js index c042e20a..7cd96f78 100644 --- a/src/config/profiles/apply.js +++ b/src/config/profiles/apply.js @@ -12,22 +12,11 @@ module.exports = (createCommon, options) => { this.timeout(30 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should apply a config profile', async () => { const diff = await ipfs.config.profiles.apply('lowpower') diff --git a/src/config/profiles/list.js b/src/config/profiles/list.js index 585baecf..69482b21 100644 --- a/src/config/profiles/list.js +++ b/src/config/profiles/list.js @@ -9,25 +9,14 @@ module.exports = (createCommon, options) => { const common = 
createCommon() describe('.config.profiles.list', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should list config profiles', async () => { const profiles = await ipfs.config.profiles.list() diff --git a/src/config/replace.js b/src/config/replace.js index 8620ea09..bab2133c 100644 --- a/src/config/replace.js +++ b/src/config/replace.js @@ -9,50 +9,31 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.replace', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) const config = { Fruit: 'Bananas' } - it('should replace the whole config', (done) => { - ipfs.config.replace(config, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal(config) - done() - }) - }) + it('should replace the whole config', async () => { + await ipfs.config.replace(config) + + const _config = await ipfs.config.get() + expect(_config).to.deep.equal(config) }) - it('should replace to empty config', (done) => { - ipfs.config.replace({}, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal({}) - done() - }) - }) + it('should replace to empty config', async () => { + await ipfs.config.replace({}) + + const _config = await ipfs.config.get() + expect(_config).to.deep.equal({}) }) }) } diff --git a/src/config/set.js b/src/config/set.js index 7843b60b..c808c4ec 100644 --- a/src/config/set.js +++ b/src/config/set.js @@ -9,67 +9,37 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.set', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should set a new key', (done) => { - ipfs.config.set('Fruit', 'banana', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('banana') - done() - }) - }) - }) + after(() => common.teardown()) + + it('should set a new key', async () => { + await ipfs.config.set('Fruit', 'banana') - it('should set a new key (promised)', () => { - return ipfs.config.set('Fruit', 
'banana') - .then(() => ipfs.config.get('Fruit')) - .then((fruit) => { - expect(fruit).to.equal('banana') - }) + const fruit = await ipfs.config.get('Fruit') + expect(fruit).to.equal('banana') }) - it('should set an already existing key', (done) => { - ipfs.config.set('Fruit', 'morango', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('morango') - done() - }) - }) + it('should set an already existing key', async () => { + await ipfs.config.set('Fruit', 'morango') + + const fruit = await ipfs.config.get('Fruit') + expect(fruit).to.equal('morango') }) - it('should set a number', (done) => { + it('should set a number', async () => { const key = 'Discovery.MDNS.Interval' const val = 11 - ipfs.config.set(key, val, function (err) { - expect(err).to.not.exist() - ipfs.config.get(key, function (err, result) { - expect(err).to.not.exist() - expect(result).to.equal(val) - done() - }) - }) + + await ipfs.config.set(key, val) + + const result = await ipfs.config.get(key) + expect(result).to.equal(val) }) it('should set a boolean', async () => { @@ -88,31 +58,22 @@ module.exports = (createCommon, options) => { expect(await ipfs.config.get(key)).to.equal(value) }) - it('should set a JSON object', (done) => { + it('should set a JSON object', async () => { const key = 'API.HTTPHeaders.Access-Control-Allow-Origin' const val = ['http://example.io'] - ipfs.config.set(key, val, function (err) { - expect(err).to.not.exist() - ipfs.config.get(key, function (err, result) { - expect(err).to.not.exist() - expect(result).to.deep.equal(val) - done() - }) - }) + + await ipfs.config.set(key, val) + + const result = await ipfs.config.get(key) + expect(result).to.deep.equal(val) }) - it('should fail on non valid key', (done) => { - ipfs.config.set(Buffer.from('heeey'), '', (err) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid key', () => { + return expect(ipfs.config.set(Buffer.from('heeey'), '')).to.eventually.be.rejected() }) - it('should fail on non valid value', (done) => { - ipfs.config.set('Fruit', Buffer.from('abc'), (err) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid value', () => { + return expect(ipfs.config.set('Fruit', Buffer.from('abc'))).to.eventually.be.rejected() }) }) } diff --git a/src/dag/get.js b/src/dag/get.js index 9a03ffcc..ad68e279 100644 --- a/src/dag/get.js +++ b/src/dag/get.js @@ -1,13 +1,12 @@ /* eslint-env mocha */ 'use strict' -const { series, eachSeries } = require('async') +const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const Unixfs = require('ipfs-unixfs') const CID = require('cids') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -15,26 +14,13 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.get', () => { + describe('.dag.get', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - 
}) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let pbNode let cborNode @@ -43,284 +29,184 @@ module.exports = (createCommon, options) => { let cidPb let cidCbor - before((done) => { - series([ - (cb) => { - const someData = Buffer.from('some other data') - - try { - pbNode = new DAGNode(someData) - } catch (err) { - return cb(err) - } - - cborNode = { - data: someData - } - - cb() - }, - (cb) => { - try { - nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - dagPB.util.cid(nodePb.serialize()) - .then(cid => { - cidPb = cid - cb() - }, cb) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: cidPb - } - - dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - .then(cid => { - cidCbor = cid - cb() - }, cb) - }, - (cb) => { - eachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }, cb) - } - ], done) + before(async () => { + const someData = Buffer.from('some other data') + pbNode = new DAGNode(someData) + cborNode = { + data: someData + } + + nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) + cidPb = await dagPB.util.cid(nodePb.serialize()) + nodeCbor = { + someData: 'I am inside a Cbor object', + pb: cidPb + } + + cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) + + await pEachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el) => ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + })) }) - it('should get a dag-pb node', (done) => { - ipfs.dag.put(pbNode, { + it('should get a dag-pb node', async () => { + const cid = await ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - const node = result.value - expect(pbNode.toJSON()).to.eql(node.toJSON()) - done() - }) }) + + const result = await ipfs.dag.get(cid) + + const node = result.value + expect(pbNode.toJSON()).to.eql(node.toJSON()) }) - it('should get a dag-cbor node', (done) => { - ipfs.dag.put(cborNode, { + it('should get a dag-cbor node', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - - const node = result.value - expect(cborNode).to.eql(node) - done() - }) }) + + const result = await ipfs.dag.get(cid) + + const node = result.value + expect(cborNode).to.eql(node) }) - it('should get a dag-pb node with path', (done) => { - ipfs.dag.get(cidPb, '/', (err, result) => { - expect(err).to.not.exist() + it('should get a dag-pb node with path', async () => { + const result = await ipfs.dag.get(cidPb, '/') - const node = result.value + const node = result.value - dagPB.util.cid(node.serialize()) - .then(cid => { - expect(cid).to.eql(cidPb) - done() - }) - .catch(done) - }) + const cid = await dagPB.util.cid(node.serialize()) + expect(cid).to.eql(cidPb) }) - it('should get a dag-pb node local value', function (done) { - ipfs.dag.get(cidPb, 'Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + 
it('should get a dag-pb node local value', async function () { + const result = await ipfs.dag.get(cidPb, 'Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) it.skip('should get a dag-pb node value one level deep', (done) => {}) it.skip('should get a dag-pb node value two levels deep', (done) => {}) - it('should get a dag-cbor node with path', (done) => { - ipfs.dag.get(cidCbor, '/', (err, result) => { - expect(err).to.not.exist() + it('should get a dag-cbor node with path', async () => { + const result = await ipfs.dag.get(cidCbor, '/') - const node = result.value + const node = result.value - dagCBOR.util.cid(dagCBOR.util.serialize(node)) - .then(cid => { - expect(cid).to.eql(cidCbor) - done() - }) - .catch(done) - }) + const cid = await dagCBOR.util.cid(dagCBOR.util.serialize(node)) + expect(cid).to.eql(cidCbor) }) - it('should get a dag-cbor node local value', (done) => { - ipfs.dag.get(cidCbor, 'someData', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql('I am inside a Cbor object') - done() - }) + it('should get a dag-cbor node local value', async () => { + const result = await ipfs.dag.get(cidCbor, 'someData') + expect(result.value).to.eql('I am inside a Cbor object') }) it.skip('should get dag-cbor node value one level deep', (done) => {}) it.skip('should get dag-cbor node value two levels deep', (done) => {}) it.skip('should get dag-cbor value via dag-pb node', (done) => {}) - it('should get dag-pb value via dag-cbor node', function (done) { - ipfs.dag.get(cidCbor, 'pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + it('should get dag-pb value via dag-cbor node', async function () { + const result = await ipfs.dag.get(cidCbor, 'pb/Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) - it('should get by CID string', (done) => { + it('should get by CID string', async () => { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.get(cidCborStr, (err, result) => { - expect(err).to.not.exist() + const result = await ipfs.dag.get(cidCborStr) - const node = result.value + const node = result.value - dagCBOR.util.cid(dagCBOR.util.serialize(node)) - .then(cid => { - expect(cid).to.eql(cidCbor) - done() - }) - .catch(done) - }) + const cid = await dagCBOR.util.cid(dagCBOR.util.serialize(node)) + expect(cid).to.eql(cidCbor) }) - it('should get by CID string + path', function (done) { + it('should get by CID string + path', async function () { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.get(cidCborStr + '/pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + const result = await ipfs.dag.get(cidCborStr + '/pb/Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) - it('should get only a CID, due to resolving locally only', function (done) { - ipfs.dag.get(cidCbor, 'pb/Data', { localResolve: true }, (err, result) => { - expect(err).to.not.exist() - expect(result.value.equals(cidPb)).to.be.true() - done() - }) + it('should get only a CID, due to resolving locally only', async function () { + const result = await ipfs.dag.get(cidCbor, 'pb/Data', { localResolve: true }) + expect(result.value.equals(cidPb)).to.be.true() }) - it('should get a node added as CIDv0 with a CIDv1', done => { + it('should get a node added as CIDv0 with a CIDv1', async () => { const input = 
Buffer.from(`TEST${Date.now()}`) const node = new DAGNode(input) - ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.version).to.equal(0) + const cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }) + expect(cid.version).to.equal(0) - const cidv1 = cid.toV1() + const cidv1 = cid.toV1() - ipfs.dag.get(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output.value.Data).to.eql(input) - done() - }) - }) + const output = await ipfs.dag.get(cidv1) + expect(output.value.Data).to.eql(input) }) - it('should get a node added as CIDv1 with a CIDv0', done => { + it('should get a node added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) - const cidv1 = new CID(res[0].hash) - expect(cidv1.version).to.equal(1) + const cidv1 = new CID(res[0].hash) + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.dag.get(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input) - done() - }) - }) + const output = await ipfs.dag.get(cidv0) + expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input) }) - it('should be able to get part of a dag-cbor node', (done) => { + it('should be able to get part of a dag-cbor node', async () => { const cbor = { foo: 'dag-cbor-bar' } - ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - cid = cid.toBaseEncodedString('base32') - expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') - ipfs.dag.get(cid, 'foo', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.equal('dag-cbor-bar') - done() - }) - }) + + let cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + expect(cid.codec).to.equal('dag-cbor') + cid = cid.toBaseEncodedString('base32') + expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') + + const result = await ipfs.dag.get(cid, 'foo') + expect(result.value).to.equal('dag-cbor-bar') }) - it('should be able to traverse from one dag-cbor node to another', (done) => { + it('should be able to traverse from one dag-cbor node to another', async () => { const cbor1 = { foo: 'dag-cbor-bar' } - ipfs.dag.put(cbor1, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid1) => { - expect(err).to.not.exist() + const cid1 = await ipfs.dag.put(cbor1, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cbor2 = { other: cid1 } - const cbor2 = { other: cid1 } + const cid2 = await ipfs.dag.put(cbor2, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - ipfs.dag.put(cbor2, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid2) => { - expect(err).to.not.exist() - - ipfs.dag.get(cid2, 'other/foo', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.equal('dag-cbor-bar') - done() - }) - }) - }) + const result = await ipfs.dag.get(cid2, 'other/foo') + expect(result.value).to.equal('dag-cbor-bar') }) - it('should be able to get a DAG node with format raw', (done) => { + it('should be able to get a DAG node with format raw', async () => { const buf = Buffer.from([0, 1, 2, 3]) - ipfs.dag.put(buf, { + const cid = await ipfs.dag.put(buf, { format: 'raw', hashAlg: 
'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.deep.equal(buf) - done() - }) }) + + const result = await ipfs.dag.get(cid) + expect(result.value).to.deep.equal(buf) }) }) } diff --git a/src/dag/put.js b/src/dag/put.js index f1532cbd..9ef382c9 100644 --- a/src/dag/put.js +++ b/src/dag/put.js @@ -6,7 +6,6 @@ const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const CID = require('cids') const multihash = require('multihashes') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -14,26 +13,13 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.put', () => { + describe('.dag.put', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let pbNode let cborNode @@ -54,86 +40,63 @@ module.exports = (createCommon, options) => { done() }) - it('should put dag-pb with default hash func (sha2-256)', (done) => { - ipfs.dag.put(pbNode, { + it('should put dag-pb with default hash func (sha2-256)', () => { + return ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha2-256' - }, done) + }) }) - it('should put dag-pb with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(pbNode, { + it('should put dag-pb with custom hash func (sha3-512)', () => { + return ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha3-512' - }, done) + }) }) - it('should put dag-cbor with default hash func (sha2-256)', (done) => { - ipfs.dag.put(cborNode, { + it('should put dag-cbor with default hash func (sha2-256)', () => { + return ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, done) + }) }) - it('should put dag-cbor with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(cborNode, { + it('should put dag-cbor with custom hash func (sha3-512)', () => { + return ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha3-512' - }, done) + }) }) - it('should return the cid', (done) => { - ipfs.dag.put(cborNode, { + it('should return the cid', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.exist() - expect(CID.isCID(cid)).to.equal(true) - dagCBOR.util.cid(dagCBOR.util.serialize(cborNode)) - .then(_cid => { - expect(cid.buffer).to.eql(_cid.buffer) - done() - }) - .catch(done) }) - }) + expect(cid).to.exist() + expect(CID.isCID(cid)).to.equal(true) - it('should not fail when calling put without options', (done) => { - ipfs.dag.put(cborNode, done) + const _cid = await dagCBOR.util.cid(dagCBOR.util.serialize(cborNode)) + expect(cid.buffer).to.eql(_cid.buffer) }) - it('should not fail when calling put without options (promised)', () => { + it('should not fail when calling put without options', () => { return ipfs.dag.put(cborNode) }) - it('should set defaults when calling put without options', (done) => { - 
ipfs.dag.put(cborNode, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') - done() - }) - }) - - it('should set defaults when calling put without options (promised)', () => { - return ipfs.dag.put(cborNode) - .then((cid) => { - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') - }) + it('should set defaults when calling put without options', async () => { + const cid = await ipfs.dag.put(cborNode) + expect(cid.codec).to.equal('dag-cbor') + expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') }) - it('should override hash algoritm default and resolve with it', (done) => { - ipfs.dag.put(cborNode, { + it('should override hash algorithm default and resolve with it', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha3-512' - }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha3-512') - done() }) + expect(cid.codec).to.equal('dag-cbor') + expect(multihash.decode(cid.multihash).name).to.equal('sha3-512') }) it.skip('should put by passing the cid instead of format and hashAlg', (done) => {}) diff --git a/src/dag/tree.js b/src/dag/tree.js index 4eb6ccf2..a60746f7 100644 --- a/src/dag/tree.js +++ b/src/dag/tree.js @@ -1,12 +1,10 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') -const eachSeries = require('async/eachSeries') +const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -14,127 +12,74 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.tree', () => { + describe('.dag.tree', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let nodePb let nodeCbor let cidPb let cidCbor - before(function (done) { - series([ - (cb) => { - try { - nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - dagPB.util.cid(nodePb.serialize()) - .then(cid => { - cidPb = cid - cb() - }, cb) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: cidPb - } - - dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - .then(cid => { - cidCbor = cid - cb() - }, cb) - }, - (cb) => { - eachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }, cb) - } - ], done) + before(async function () { + nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) + cidPb = await dagPB.util.cid(nodePb.serialize()) + + nodeCbor = { +
someData: 'I am inside a Cbor object', + pb: cidPb + } + cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) + + await pEachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el) => ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + })) }) - it('should get tree with CID', (done) => { - ipfs.dag.tree(cidCbor, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([ - 'pb', - 'someData' - ]) - done() - }) + it('should get tree with CID', async () => { + const paths = await ipfs.dag.tree(cidCbor) + expect(paths).to.eql([ + 'pb', + 'someData' + ]) }) - it('should get tree with CID and path', (done) => { - ipfs.dag.tree(cidCbor, 'someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) + it('should get tree with CID and path', async () => { + const paths = await ipfs.dag.tree(cidCbor, 'someData') + expect(paths).to.eql([]) }) - it('should get tree with CID and path as String', (done) => { + it('should get tree with CID and path as String', async () => { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.tree(cidCborStr + '/someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) + const paths = await ipfs.dag.tree(cidCborStr + '/someData') + expect(paths).to.eql([]) }) - it('should get tree with CID recursive (accross different formats)', (done) => { - ipfs.dag.tree(cidCbor, { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.have.members([ - 'pb', - 'someData', - 'pb/Links', - 'pb/Data' - ]) - done() - }) + it('should get tree with CID recursive (across different formats)', async () => { + const paths = await ipfs.dag.tree(cidCbor, { recursive: true }) + expect(paths).to.have.members([ + 'pb', + 'someData', + 'pb/Links', + 'pb/Data' + ]) }) - it('should get tree with CID and path recursive', (done) => { - ipfs.dag.tree(cidCbor, 'pb', { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.have.members([ - 'Links', - 'Data' - ]) - done() - }) + it('should get tree with CID and path recursive', async () => { + const paths = await ipfs.dag.tree(cidCbor, 'pb', { recursive: true }) + expect(paths).to.have.members([ + 'Links', + 'Data' + ]) }) }) } diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js index 028ebb15..898ddf4b 100644 --- a/src/dht/find-peer.js +++ b/src/dht/find-peer.js @@ -1,9 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,51 +14,27 @@ module.exports = (createCommon, options) => { let nodeA let nodeB - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - - connect(nodeB, nodeA.peerId.addresses[0], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + await nodeB.swarm.connect(nodeA.peerId.addresses[0]) }) - after(function (done) { - this.timeout(50 * 1000) - - common.teardown(done) - }) +
after(() => common.teardown()) - it('should find other peers', (done) => { - nodeA.dht.findPeer(nodeB.peerId.id, (err, res) => { - expect(err).to.not.exist() + it('should find other peers', async () => { + const res = await nodeA.dht.findPeer(nodeB.peerId.id) - const id = res.id.toB58String() - const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/' - const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0]) + const id = res.id.toB58String() + const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/' + const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0]) - expect(id).to.be.eql(nodeB.peerId.id) - expect(nodeAddresses).to.include(peerAddresses[0]) - done() - }) + expect(id).to.be.eql(nodeB.peerId.id) + expect(nodeAddresses).to.include(peerAddresses[0]) }) - it('should fail to find other peer if peer does not exist', (done) => { - nodeA.dht.findPeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ', (err, peer) => { - expect(err).to.exist() - expect(peer).to.not.exist() - done() - }) + it('should fail to find other peer if peer does not exist', () => { + return expect(nodeA.dht.findPeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ')).to.eventually.be.rejected() }) }) } diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js index 478eb0d2..4897f4f1 100644 --- a/src/dht/find-provs.js +++ b/src/dht/find-provs.js @@ -2,21 +2,15 @@ 'use strict' const multihashing = require('multihashing-async') -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') const CID = require('cids') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') -function fakeCid (cb) { +async function fakeCid () { const bytes = Buffer.from(`TEST${Date.now()}`) - multihashing(bytes, 'sha2-256', (err, mh) => { - if (err) { - cb(err) - } - cb(null, new CID(0, 'dag-pb', mh)) - }) + + const mh = await multihashing(bytes, 'sha2-256') + + return new CID(0, 'dag-pb', mh) } module.exports = (createCommon, options) => { @@ -25,82 +19,60 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.dht.findProvs', function () { + this.timeout(80 * 1000) + let nodeA let nodeB let nodeC - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step + before(async function () { this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(3, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - nodeC = nodes[2] - - parallel([ - (cb) => connect(nodeB, nodeA.peerId.addresses[0], cb), - (cb) => connect(nodeC, nodeB.peerId.addresses[0], cb) - ], done) - }) - }) + nodeA = await common.setup() + nodeB = await common.setup() + nodeC = await common.setup() + await Promise.all([ + nodeB.swarm.connect(nodeA.peerId.addresses[0]), + nodeC.swarm.connect(nodeB.peerId.addresses[0]) + ]) }) + after(() => common.teardown()) + let providedCid - before('add providers for the same cid', function (done) { + before('add providers for the same cid', async function () { this.timeout(10 * 1000) - parallel([ - (cb) => nodeB.object.new('unixfs-dir', cb), - (cb) => nodeC.object.new('unixfs-dir', cb) - ], (err, cids) => { - if (err) return done(err) - providedCid = 
+      const cids = await Promise.all([
+        nodeB.object.new('unixfs-dir'),
+        nodeC.object.new('unixfs-dir')
+      ])
+
+      providedCid = cids[0]

-      common.teardown(done)
+      await Promise.all([
+        nodeB.dht.provide(providedCid),
+        nodeC.dht.provide(providedCid)
+      ])
     })

-    it('should be able to find providers', function (done) {
-      this.timeout(20 * 1000)
-
-      waterfall([
-        (cb) => nodeA.dht.findProvs(providedCid, cb),
-        (provs, cb) => {
-          const providerIds = provs.map((p) => p.id.toB58String())
-          expect(providerIds).to.have.members([
-            nodeB.peerId.id,
-            nodeC.peerId.id
-          ])
-          cb()
-        }
-      ], done)
+    it('should be able to find providers', async function () {
+      const provs = await nodeA.dht.findProvs(providedCid)
+      const providerIds = provs.map((p) => p.id.toB58String())
+
+      expect(providerIds).to.have.members([
+        nodeB.peerId.id,
+        nodeC.peerId.id
+      ])
     })

-    it('should take options to override timeout config', function (done) {
+    it('should take options to override timeout config', async function () {
       const options = {
         timeout: 1
       }
-      waterfall([
-        (cb) => fakeCid(cb),
-        (cidV0, cb) => nodeA.dht.findProvs(cidV0, options, (err) => {
-          expect(err).to.exist()
-          cb(null)
-        })
-      ], done)
+
+      const cidV0 = await fakeCid()
+
+      await expect(nodeA.dht.findProvs(cidV0, options)).to.be.rejected()
     })
   })
}
diff --git a/src/dht/get.js b/src/dht/get.js
index c3d053a5..a71e54d2 100644
--- a/src/dht/get.js
+++ b/src/dht/get.js
@@ -2,10 +2,7 @@
 'use strict'

 const hat = require('hat')
-const waterfall = require('async/waterfall')
-const { spawnNodesWithId } = require('../utils/spawn')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { connect } = require('../utils/swarm')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -18,52 +15,27 @@ module.exports = (createCommon, options) => {
     let nodeA
     let nodeB

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-
-        spawnNodesWithId(2, factory, (err, nodes) => {
-          expect(err).to.not.exist()
-
-          nodeA = nodes[0]
-          nodeB = nodes[1]
-
-          connect(nodeA, nodeB.peerId.addresses[0], done)
-        })
-      })
+    before(async () => {
+      nodeA = await common.setup()
+      nodeB = await common.setup()
+      await nodeA.swarm.connect(nodeB.peerId.addresses[0])
     })

-    after(function (done) {
-      this.timeout(50 * 1000)
+    after(() => common.teardown())

-      common.teardown(done)
+    it('should error when getting a non-existent key from the DHT', () => {
+      return expect(nodeA.dht.get('non-existing', { timeout: 100 })).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
     })

-    it('should error when getting a non-existent key from the DHT', (done) => {
-      nodeA.dht.get('non-existing', { timeout: 100 }, (err, value) => {
-        expect(err).to.be.an.instanceof(Error)
-        done()
-      })
-    })
-
-    it('should get a value after it was put on another node', function (done) {
-      this.timeout(80 * 1000)
-
+    it('should get a value after it was put on another node', async () => {
       const key = Buffer.from(hat())
       const value = Buffer.from(hat())

-      waterfall([
-        cb => nodeB.dht.put(key, value, cb),
-        cb => nodeA.dht.get(key, cb),
-        (result, cb) => {
-          expect(result).to.eql(value)
-          cb()
-        }
-      ], done)
+      await nodeB.dht.put(key, value)
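+      // the record written through nodeB should be readable from nodeA over the DHT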
+      const result = await nodeA.dht.get(key)
+
+      expect(result).to.eql(value)
     })
   })
}
diff --git a/src/dht/provide.js b/src/dht/provide.js
index ca927a2f..456dcbc7 100644
--- a/src/dht/provide.js
+++ b/src/dht/provide.js
@@ -2,9 +2,7 @@
 'use strict'

 const CID = require('cids')
-const { spawnNodesWithId } = require('../utils/spawn')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { connect } = require('../utils/swarm')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -16,102 +14,55 @@ module.exports = (createCommon, options) => {

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-
-        spawnNodesWithId(2, factory, (err, nodes) => {
-          expect(err).to.not.exist()
-          ipfs = nodes[0]
-          connect(ipfs, nodes[1].peerId.addresses[0], done)
-        })
-      })
+    before(async () => {
+      ipfs = await common.setup()
+      const nodeB = await common.setup()
+      await ipfs.swarm.connect(nodeB.peerId.addresses[0])
     })

-    after(function (done) {
-      this.timeout(50 * 1000)
-
-      common.teardown(done)
-    })
+    after(() => common.teardown())

-    it('should provide local CID', (done) => {
-      ipfs.add(Buffer.from('test'), (err, res) => {
-        if (err) return done(err)
+    it('should provide local CID', async () => {
+      const res = await ipfs.add(Buffer.from('test'))

-        ipfs.dht.provide(new CID(res[0].hash), (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      await ipfs.dht.provide(new CID(res[0].hash))
     })

-    it('should not provide if block not found locally', (done) => {
+    it('should not provide if block not found locally', () => {
       const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ')

-      ipfs.dht.provide(cid, (err) => {
-        expect(err).to.exist()
-        expect(err.message).to.include('not found locally')
-        done()
-      })
+      return expect(ipfs.dht.provide(cid)).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.have.property('message')
+        .that.include('not found locally')
     })

-    it('should allow multiple CIDs to be passed', (done) => {
-      ipfs.add([
+    it('should allow multiple CIDs to be passed', async () => {
+      const res = await ipfs.add([
         { content: Buffer.from('t0') },
         { content: Buffer.from('t1') }
-      ], (err, res) => {
-        if (err) return done(err)
-
-        ipfs.dht.provide([
-          new CID(res[0].hash),
-          new CID(res[1].hash)
-        ], (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      ])
+
+      await ipfs.dht.provide([
+        new CID(res[0].hash),
+        new CID(res[1].hash)
+      ])
     })

-    it('should provide a CIDv1', (done) => {
-      ipfs.add(Buffer.from('test'), { cidVersion: 1 }, (err, res) => {
-        if (err) return done(err)
+    it('should provide a CIDv1', async () => {
+      const res = await ipfs.add(Buffer.from('test'), { cidVersion: 1 })

-        const cid = new CID(res[0].hash)
+      const cid = new CID(res[0].hash)

-        ipfs.dht.provide(cid, (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      await ipfs.dht.provide(cid)
     })

-    it('should provide a CIDv1 string', (done) => {
-      ipfs.add(Buffer.from('test'), { cidVersion: 1 }, (err, res) => {
-        if (err) return done(err)
-
-        const cid = res[0].hash
-
-        ipfs.dht.provide(cid, (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
-    })
-
-    it('should error on non CID arg', (done) => {
-      ipfs.dht.provide({}, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+    it('should error on non CID arg', () => {
+      return expect(ipfs.dht.provide({})).to.eventually.be.rejected()
     })

-    it('should error on array containing non CID arg', (done) => {
-      ipfs.dht.provide([{}], (err) => {
-        expect(err).to.exist()
-        done()
-      })
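+    // every element of an array argument must itself be a CID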
+    it('should error on array containing non CID arg', () => {
+      return expect(ipfs.dht.provide([{}])).to.eventually.be.rejected()
     })
   })
}
diff --git a/src/dht/put.js b/src/dht/put.js
index a1c39794..826c9fcc 100644
--- a/src/dht/put.js
+++ b/src/dht/put.js
@@ -1,9 +1,7 @@
 /* eslint-env mocha */
 'use strict'

-const { spawnNodesWithId } = require('../utils/spawn')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { connect } = require('../utils/swarm')
+const { getDescribe, getIt } = require('../utils/mocha')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -16,35 +14,19 @@ module.exports = (createCommon, options) => {
     let nodeA
     let nodeB

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-
-        spawnNodesWithId(2, factory, (err, nodes) => {
-          expect(err).to.not.exist()
-
-          nodeA = nodes[0]
-          nodeB = nodes[1]
-          connect(nodeA, nodeB.peerId.addresses[0], done)
-        })
-      })
+    before(async () => {
+      nodeA = await common.setup()
+      nodeB = await common.setup()
+      await nodeA.swarm.connect(nodeB.peerId.addresses[0])
     })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should put a value to the DHT', (done) => {
-      this.timeout(80 * 1000)
+    it('should put a value to the DHT', async () => {
       const key = Buffer.from('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
       const data = Buffer.from('data')

-      nodeA.dht.put(key, data, (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      await nodeA.dht.put(key, data)
     })
   })
}
diff --git a/src/dht/query.js b/src/dht/query.js
index 05bfd8dd..2e155911 100644
--- a/src/dht/query.js
+++ b/src/dht/query.js
@@ -1,9 +1,8 @@
 /* eslint-env mocha */
 'use strict'

-const { spawnNodesWithId } = require('../utils/spawn')
+const pTimeout = require('p-timeout')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { connect } = require('../utils/swarm')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -16,52 +15,32 @@ module.exports = (createCommon, options) => {
     let nodeA
     let nodeB

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-
-        spawnNodesWithId(2, factory, (err, nodes) => {
-          expect(err).to.not.exist()
-
-          nodeA = nodes[0]
-          nodeB = nodes[1]
-
-          connect(nodeB, nodeA.peerId.addresses[0], done)
-        })
-      })
+    before(async () => {
+      nodeA = await common.setup()
+      nodeB = await common.setup()
+      await nodeB.swarm.connect(nodeA.peerId.addresses[0])
     })

-    after(function (done) {
-      this.timeout(50 * 1000)
+    after(() => common.teardown())

-      common.teardown(done)
-    })
-
-    it('should return the other node in the query', function (done) {
+    it('should return the other node in the query', async function () {
       const timeout = 150 * 1000
       this.timeout(timeout)

-      let skipped = false
-
-      // This test is meh. DHT works best with >= 20 nodes. Therefore a
-      // failure might happen, but we don't want to report it as such.
-      // Hence skip the test before the timeout is reached
-      const timeoutId = setTimeout(function () {
-        skipped = true
-        this.skip()
-      }.bind(this), timeout - 1000)
+      try {
+        const peers = await pTimeout(nodeA.dht.query(nodeB.peerId.id), timeout - 1000)

-      nodeA.dht.query(nodeB.peerId.id, (err, peers) => {
-        if (skipped) return
-        clearTimeout(timeoutId)
-        expect(err).to.not.exist()
         expect(peers.map((p) => p.id.toB58String())).to.include(nodeB.peerId.id)
-        done()
-      })
+      } catch (err) {
+        if (err.name === 'TimeoutError') {
+          // This test is meh. DHT works best with >= 20 nodes. Therefore a
+          // failure might happen, but we don't want to report it as such.
+          // Hence skip the test before the timeout is reached
+          this.skip()
+        } else {
+          throw err
+        }
+      }
     })
   })
}
diff --git a/src/files-mfs/cp.js b/src/files-mfs/cp.js
index 5eea8753..966a8c30 100644
--- a/src/files-mfs/cp.js
+++ b/src/files-mfs/cp.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { fixtures } = require('../files-regular/utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -12,68 +11,39 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.cp', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should copy file, expect error', (done) => {
+    it('should copy file, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.cp(`${testDir}/c`, `${testDir}/b`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.cp(`${testDir}/c`, `${testDir}/b`)).to.eventually.be.rejected()
     })

-    it('should copy file, expect no error', (done) => {
+    it('should copy file, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }, cb),
-        (cb) => ipfs.files.cp(`${testDir}/a`, `${testDir}/b`, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      await ipfs.files.mkdir(testDir, { parents: true })
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true })
+      await ipfs.files.cp(`${testDir}/a`, `${testDir}/b`)
     })

-    it('should copy dir, expect error', (done) => {
+    it('should copy dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.cp(`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.cp(`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`)).to.eventually.be.rejected()
     })

-    it('should copy dir, expect no error', (done) => {
+    it('should copy dir, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, cb),
-        (cb) => ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true })
+      await ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`)
     })

     it('should copy from outside of mfs', async () => {
diff --git a/src/files-mfs/flush.js b/src/files-mfs/flush.js
index 647f0ec4..eb3b66df 100644
--- a/src/files-mfs/flush.js
+++ b/src/files-mfs/flush.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')

@@ -11,53 +10,31 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.flush', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not flush not found file/dir, expect error', (done) => {
+    it('should not flush not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.flush(`${testDir}/404`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.flush(`${testDir}/404`)).to.eventually.be.rejected()
     })

-    it('should flush root', (done) => {
-      ipfs.files.flush((err) => {
-        expect(err).to.not.exist()
-        done()
-      })
-    })
+    it('should flush root', () => ipfs.files.flush())

-    it('should flush specific dir', (done) => {
+    it('should flush specific dir', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-        (cb) => ipfs.files.flush(testDir, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      await ipfs.files.mkdir(testDir, { parents: true })
+      await ipfs.files.flush(testDir)
     })
   })
}
diff --git a/src/files-mfs/ls-pull-stream.js b/src/files-mfs/ls-pull-stream.js
index e9977c12..6210eaed 100644
--- a/src/files-mfs/ls-pull-stream.js
+++ b/src/files-mfs/ls-pull-stream.js
@@ -1,12 +1,9 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pull = require('pull-stream/pull')
-const onEnd = require('pull-stream/sinks/on-end')
-const collect = require('pull-stream/sinks/collect')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -14,94 +11,59 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.lsPullStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not ls not found file/dir, expect error', (done) => {
+    it('should not ls not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      pull(
-        ipfs.files.lsPullStream(`${testDir}/404`),
-        onEnd((err) => {
-          expect(err).to.exist()
-          expect(err.message).to.include('does not exist')
-          done()
-        })
-      )
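+      // pull-to-promise drains the pull stream and rejects the returned promise on stream error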
+      return expect(pullToPromise.any(ipfs.files.lsPullStream(`${testDir}/404`))).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.have.property('message')
+        .that.include('does not exist')
     })

-    it('should ls directory', (done) => {
+    it('should ls directory', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        pull(
-          ipfs.files.lsPullStream(testDir),
-          collect((err, entries) => {
-            expect(err).to.not.exist()
-            expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-              { name: 'b', type: 0, size: 0, hash: '' },
-              { name: 'lv1', type: 0, size: 0, hash: '' }
-            ])
-            done()
-          })
-        )
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir))
+
+      expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        { name: 'b', type: 0, size: 0, hash: '' },
+        { name: 'lv1', type: 0, size: 0, hash: '' }
+      ])
     })

-    it('should ls directory with long option', (done) => {
+    it('should ls directory with long option', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        pull(
-          ipfs.files.lsPullStream(testDir, { long: true }),
-          collect((err, entries) => {
-            expect(err).to.not.exist()
-            expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-              {
-                name: 'b',
-                type: 0,
-                size: 13,
-                hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
-              },
-              {
-                name: 'lv1',
-                type: 1,
-                size: 0,
-                hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
-              }
-            ])
-            done()
-          })
-        )
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir, { long: true }))
+
+      expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        {
+          name: 'b',
+          type: 0,
+          size: 13,
+          hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+        },
+        {
+          name: 'lv1',
+          type: 1,
+          size: 0,
+          hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+        }
+      ])
     })
   })
}
diff --git a/src/files-mfs/ls-readable-stream.js b/src/files-mfs/ls-readable-stream.js
index af489a6f..e3a11a82 100644
--- a/src/files-mfs/ls-readable-stream.js
+++ b/src/files-mfs/ls-readable-stream.js
@@ -1,9 +1,9 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const getStream = require('get-stream')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -11,96 +11,63 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.lsReadableStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not ls not found file/dir, expect error', (done) => {
+    it('should not ls not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

       const stream = ipfs.files.lsReadableStream(`${testDir}/404`)

-      stream.on('data', () => {})
-      stream.once('error', (err) => {
-        expect(err).to.exist()
-        expect(err.message).to.include('does not exist')
-        done()
-      })
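+      // get-stream buffers the stream and rejects its promise when the stream emits an error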
+      return expect(getStream(stream)).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.have.property('message')
+        .that.include('does not exist')
     })

-    it('should ls directory', (done) => {
+    it('should ls directory', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        const stream = ipfs.files.lsReadableStream(testDir)
-        const entries = []
-
-        stream.on('data', entry => entries.push(entry))
-
-        stream.once('end', () => {
-          expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-            { name: 'b', type: 0, size: 0, hash: '' },
-            { name: 'lv1', type: 0, size: 0, hash: '' }
-          ])
-          done()
-        })
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const stream = ipfs.files.lsReadableStream(testDir)
+
+      const entries = await getStream.array(stream)
+
+      expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        { name: 'b', type: 0, size: 0, hash: '' },
+        { name: 'lv1', type: 0, size: 0, hash: '' }
+      ])
     })

-    it('should ls directory with long option', (done) => {
+    it('should ls directory with long option', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        const stream = ipfs.files.lsReadableStream(testDir, { long: true })
-        const entries = []
-
-        stream.on('data', entry => entries.push(entry))
-
-        stream.once('end', () => {
-          expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-            {
-              name: 'b',
-              type: 0,
-              size: 13,
-              hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
-            },
-            {
-              name: 'lv1',
-              type: 1,
-              size: 0,
-              hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
-            }
-          ])
-          done()
-        })
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const stream = ipfs.files.lsReadableStream(testDir, { long: true })
+      const entries = await getStream.array(stream)
+
+      expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        {
+          name: 'b',
+          type: 0,
+          size: 13,
+          hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+        },
+        {
+          name: 'lv1',
+          type: 1,
+          size: 0,
+          hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+        }
+      ])
     })
   })
}
diff --git a/src/files-mfs/ls.js b/src/files-mfs/ls.js
index 469f16b6..600db8d5 100644
--- a/src/files-mfs/ls.js
+++ b/src/files-mfs/ls.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { fixtures } = require('../files-regular/utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -12,85 +11,56 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.ls', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not ls not found file/dir, expect error', (done) => {
+    it('should not ls not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.ls(`${testDir}/404`, (err, info) => {
-        expect(err).to.exist()
-        expect(info).to.not.exist()
-        done()
-      })
+      return expect(ipfs.files.ls(`${testDir}/404`)).to.eventually.be.rejected()
     })

-    it('should ls directory', (done) => {
+    it('should ls directory', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.ls(testDir, (err, info) => {
-          expect(err).to.not.exist()
-          expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-            { name: 'b', type: 0, size: 0, hash: '' },
-            { name: 'lv1', type: 0, size: 0, hash: '' }
-          ])
-          done()
-        })
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const info = await ipfs.files.ls(testDir)
+
+      expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        { name: 'b', type: 0, size: 0, hash: '' },
+        { name: 'lv1', type: 0, size: 0, hash: '' }
+      ])
     })

-    it('should ls directory with long option', (done) => {
+    it('should ls directory with long option', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.ls(testDir, { long: true }, (err, info) => {
-          expect(err).to.not.exist()
-          expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-            {
-              name: 'b',
-              type: 0,
-              size: 13,
-              hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
-            },
-            {
-              name: 'lv1',
-              type: 1,
-              size: 0,
-              hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
-            }
-          ])
-          done()
-        })
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const info = await ipfs.files.ls(testDir, { long: true })
+
+      expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+        {
+          name: 'b',
+          type: 0,
+          size: 13,
+          hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T'
+        },
+        {
+          name: 'lv1',
+          type: 1,
+          size: 0,
+          hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
+        }
+      ])
     })

     it('should ls from outside of mfs', async () => {
diff --git a/src/files-mfs/mkdir.js b/src/files-mfs/mkdir.js
index 81190a42..9cc0dd5d 100644
--- a/src/files-mfs/mkdir.js
+++ b/src/files-mfs/mkdir.js
@@ -10,50 +10,28 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.mkdir', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should make directory on root', (done) => {
+    it('should make directory on root', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.mkdir(testDir, (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      return ipfs.files.mkdir(testDir)
     })

-    it('should make directory and its parents', (done) => {
+    it('should make directory and its parents', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => {
-        expect(err).to.not.exist()
-        done()
-      })
+      return ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true })
     })

-    it('should not make already existent directory', (done) => {
-      ipfs.files.mkdir('/', (err) => {
-        expect(err).to.exist()
-        done()
-      })
+    it('should not make already existent directory', () => {
+      return expect(ipfs.files.mkdir('/')).to.eventually.be.rejected()
     })
   })
}
diff --git a/src/files-mfs/mv.js b/src/files-mfs/mv.js
index f98b71a3..7cdbeaf0 100644
--- a/src/files-mfs/mv.js
+++ b/src/files-mfs/mv.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')

@@ -11,70 +10,38 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.mv', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
+    before(async () => {
+      await ipfs.files.mkdir('/test/lv1/lv2', { parents: true })
+      await ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true })
     })

+    after(() => common.teardown())

-    before((done) => {
-      series([
-        (cb) => ipfs.files.mkdir('/test/lv1/lv2', { parents: true }, cb),
-        (cb) => ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true }, cb)
-      ], done)
-    })
-
-    after((done) => common.teardown(done))
-
-    it('should not move not found file/dir, expect error', (done) => {
+    it('should not move not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.mv(`${testDir}/404`, `${testDir}/a`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.mv(`${testDir}/404`, `${testDir}/a`)).to.eventually.be.rejected()
     })

-    it('should move file, expect no error', (done) => {
+    it('should move file, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true })
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })

-        ipfs.files.mv(`${testDir}/a`, `${testDir}/c`, (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      await ipfs.files.mv(`${testDir}/a`, `${testDir}/c`)
     })

-    it('should move dir, expect no error', (done) => {
+    it('should move dir, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4', (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true })
+      await ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4')
     })
   })
}
diff --git a/src/files-mfs/read-pull-stream.js b/src/files-mfs/read-pull-stream.js
index 34cf6c28..6b70a494 100644
--- a/src/files-mfs/read-pull-stream.js
+++ b/src/files-mfs/read-pull-stream.js
@@ -1,11 +1,9 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pull = require('pull-stream/pull')
-const collect = require('pull-stream/sinks/collect')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -13,58 +11,32 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.readPullStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not read not found, expect error', (done) => {
+    it('should not read not found, expect error', () => {
       const testDir = `/test-${hat()}`

-      pull(
-        ipfs.files.readPullStream(`${testDir}/404`),
-        collect((err) => {
-          expect(err).to.exist()
-          expect(err.message).to.contain('does not exist')
-          done()
-        })
-      )
+      return expect(pullToPromise.any(ipfs.files.readPullStream(`${testDir}/404`))).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.have.property('message')
+        .that.include('does not exist')
     })

-    it('should read file', (done) => {
+    it('should read file', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        pull(
-          ipfs.files.readPullStream(`${testDir}/a`),
-          collect((err, bufs) => {
-            expect(err).to.not.exist()
-            expect(bufs).to.eql([Buffer.from('Hello, world!')])
-            done()
-          })
-        )
-      })
+      await ipfs.files.mkdir(testDir)
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
+
+      const bufs = await pullToPromise.any(ipfs.files.readPullStream(`${testDir}/a`))
+
+      expect(bufs).to.eql([Buffer.from('Hello, world!')])
     })
   })
}
diff --git a/src/files-mfs/read-readable-stream.js b/src/files-mfs/read-readable-stream.js
index d4f52533..c6f28665 100644
--- a/src/files-mfs/read-readable-stream.js
+++ b/src/files-mfs/read-readable-stream.js
@@ -1,10 +1,9 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const bl = require('bl')
+const getStream = require('get-stream')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -12,57 +11,34 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.readReadableStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not read not found, expect error', (done) => {
+    it('should not read not found, expect error', () => {
       const testDir = `/test-${hat()}`

       const stream = ipfs.files.readReadableStream(`${testDir}/404`)

-      stream.on('data', () => {})
-      stream.once('error', (err) => {
-        expect(err).to.exist()
-        expect(err.message).to.contain('does not exist')
-        done()
-      })
+      return expect(getStream(stream)).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.have.property('message')
+        .that.include('does not exist')
     })

-    it('should read file', (done) => {
+    it('should read file', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.mkdir(testDir)
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })

-        const stream = ipfs.files.readReadableStream(`${testDir}/a`)
+      const stream = ipfs.files.readReadableStream(`${testDir}/a`)

-        stream.pipe(bl((err, buf) => {
-          expect(err).to.not.exist()
-          expect(buf).to.eql(Buffer.from('Hello, world!'))
-          done()
-        }))
-      })
+      const buf = await getStream(stream)
+      expect(buf).to.eql('Hello, world!')
     })
   })
}
diff --git a/src/files-mfs/read.js b/src/files-mfs/read.js
index 01280349..6b97d91c 100644
--- a/src/files-mfs/read.js
+++ b/src/files-mfs/read.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { fixtures } = require('../files-regular/utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -12,52 +11,32 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.read', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not read not found, expect error', (done) => {
+    it('should not read not found, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.read(`${testDir}/404`, (err) => {
-        expect(err).to.exist()
-        expect(err.message).to.contain('does not exist')
-        done()
-      })
+      return expect(ipfs.files.read(`${testDir}/404`)).to.eventually.be.rejected
+        .and.be.an.instanceOf(Error)
+        .and.to.have.property('message')
+        .that.include('does not exist')
     })

-    it('should read file', (done) => {
+    it('should read file', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.mkdir(testDir)
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
+
+      const buf = await ipfs.files.read(`${testDir}/a`)

-        ipfs.files.read(`${testDir}/a`, (err, buf) => {
-          expect(err).to.not.exist()
-          expect(buf).to.eql(Buffer.from('Hello, world!'))
-          done()
-        })
-      })
+      expect(buf).to.eql(Buffer.from('Hello, world!'))
     })

     it('should read from outside of mfs', async () => {
diff --git a/src/files-mfs/rm.js b/src/files-mfs/rm.js
index 649a7926..a8412ea9 100644
--- a/src/files-mfs/rm.js
+++ b/src/files-mfs/rm.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')

@@ -11,63 +10,41 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.rm', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not remove not found file/dir, expect error', (done) => {
+    it('should not remove not found file/dir, expect error', () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.rm(`${testDir}/a`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.rm(`${testDir}/a`)).to.eventually.be.rejected()
     })

-    it('should remove file, expect no error', (done) => {
+    it('should remove file, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.rm(`${testDir}/c`, (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      await ipfs.files.mkdir(testDir, { parents: true })
+      await ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true })
+
+      await ipfs.files.rm(`${testDir}/c`)
+
+      const contents = await ipfs.files.ls(testDir)
+      expect(contents).to.be.an('array').and.to.be.empty()
     })

-    it('should remove dir, expect no error', (done) => {
+    it('should remove dir, expect no error', async () => {
       const testDir = `/test-${hat()}`

-      ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true })
+
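+      // removing a directory requires the recursive flag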
+      await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true })

-        ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }, (err) => {
-          expect(err).to.not.exist()
-          done()
-        })
-      })
+      const lv1Contents = await ipfs.files.ls(`${testDir}/lv1`)
+      expect(lv1Contents).to.be.an('array').and.to.be.empty()
     })
   })
}
diff --git a/src/files-mfs/stat.js b/src/files-mfs/stat.js
index 42c23305..9bc3ccb7 100644
--- a/src/files-mfs/stat.js
+++ b/src/files-mfs/stat.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { fixtures } = require('../files-regular/utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -12,128 +11,96 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.files.stat', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    before((done) => ipfs.add(fixtures.smallFile.data, done))
+    before(async () => { ipfs = await common.setup() })
+    before(async () => { await ipfs.add(fixtures.smallFile.data) })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not stat not found file/dir, expect error', function (done) {
+    it('should not stat not found file/dir, expect error', function () {
       const testDir = `/test-${hat()}`

-      ipfs.files.stat(`${testDir}/404`, (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.stat(`${testDir}/404`)).to.eventually.be.rejected()
     })

-    it('should stat file', function (done) {
+    it('should stat file', async function () {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.stat(`${testDir}/b`, (err, stat) => {
-          expect(err).to.not.exist()
-          expect(stat).to.include({
-            type: 'file',
-            blocks: 1,
-            size: 13,
-            hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
-            cumulativeSize: 71,
-            withLocality: false
-          })
-          expect(stat.local).to.be.undefined()
-          expect(stat.sizeLocal).to.be.undefined()
-          done()
-        })
+      await ipfs.files.mkdir(testDir, { parents: true })
+      await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+      const stat = await ipfs.files.stat(`${testDir}/b`)
+
+      expect(stat).to.include({
+        type: 'file',
+        blocks: 1,
+        size: 13,
+        hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
+        cumulativeSize: 71,
+        withLocality: false
       })
+      expect(stat.local).to.be.undefined()
+      expect(stat.sizeLocal).to.be.undefined()
     })

-    it('should stat dir', function (done) {
+    it('should stat dir', async function () {
       const testDir = `/test-${hat()}`

-      series([
-        (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-        (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-      ], (err) => {
-        expect(err).to.not.exist()
-
-        ipfs.files.stat(testDir, (err, stat) => {
-          expect(err).to.not.exist()
-          expect(stat).to.include({
-            type: 'directory',
-            blocks: 1,
-            size: 0,
-            hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu',
-            cumulativeSize: 118,
-            withLocality: false
-          })
-          expect(stat.local).to.be.undefined()
-          expect(stat.sizeLocal).to.be.undefined()
-          done()
-        })
-      })
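+      // a directory containing a single known file stats to deterministic hash and size values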
+      await ipfs.files.mkdir(testDir, { parents: true })
+      await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
+
+      const stat = await ipfs.files.stat(testDir)
+
+      expect(stat).to.include({
+        type: 'directory',
+        blocks: 1,
+        size: 0,
+        hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu',
+        cumulativeSize: 118,
+        withLocality: false
       })
+      expect(stat.local).to.be.undefined()
+      expect(stat.sizeLocal).to.be.undefined()
     })

     // TODO enable this test when this feature gets released on go-ipfs
-    it.skip('should stat withLocal file', function (done) {
-      ipfs.files.stat('/test/b', { withLocal: true }, (err, stat) => {
-        expect(err).to.not.exist()
-        expect(stat).to.eql({
-          type: 'file',
-          blocks: 1,
-          size: 13,
-          hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
-          cumulativeSize: 71,
-          withLocality: true,
-          local: true,
-          sizeLocal: 71
-        })
-        done()
+    it.skip('should stat withLocal file', async function () {
+      const stat = await ipfs.files.stat('/test/b', { withLocal: true })
+
+      expect(stat).to.eql({
+        type: 'file',
+        blocks: 1,
+        size: 13,
+        hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
+        cumulativeSize: 71,
+        withLocality: true,
+        local: true,
+        sizeLocal: 71
       })
     })

     // TODO enable this test when this feature gets released on go-ipfs
-    it.skip('should stat withLocal dir', function (done) {
-      ipfs.files.stat('/test', { withLocal: true }, (err, stat) => {
-        expect(err).to.not.exist()
-        expect(stat).to.eql({
-          type: 'directory',
-          blocks: 2,
-          size: 0,
-          hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto',
-          cumulativeSize: 216,
-          withLocality: true,
-          local: true,
-          sizeLocal: 216
-        })
-        done()
+    it.skip('should stat withLocal dir', async function () {
+      const stat = await ipfs.files.stat('/test', { withLocal: true })
+
+      expect(stat).to.eql({
+        type: 'directory',
+        blocks: 2,
+        size: 0,
+        hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto',
+        cumulativeSize: 216,
+        withLocality: true,
+        local: true,
+        sizeLocal: 216
       })
     })

     it('should stat outside of mfs', async () => {
       const stat = await ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid)
+
       expect(stat).to.include({
         type: 'file',
         blocks: 0,
diff --git a/src/files-mfs/write.js b/src/files-mfs/write.js
index 7b9a9f4a..6fc3e42d 100644
--- a/src/files-mfs/write.js
+++ b/src/files-mfs/write.js
@@ -14,58 +14,32 @@ module.exports = (createCommon, options) => {

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should not write to non existent file, expect error', function (done) {
+    it('should not write to non existent file, expect error', function () {
       const testDir = `/test-${hat()}`

-      ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), (err) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'))).to.eventually.be.rejected()
     })

-    it('should write to non existent file with create flag', function (done) {
+    it('should write to non existent file with create flag', async function () {
       const testPath = `/test-${hat()}`

-      ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true }, (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true })

-        ipfs.files.stat(testPath, (err, stats) => {
-          expect(err).to.not.exist()
-          expect(stats.type).to.equal('file')
-          done()
-        })
-      })
+      const stats = await ipfs.files.stat(testPath)
+      expect(stats.type).to.equal('file')
     })

-    it('should write to deeply nested non existent file with create and parents flags', function (done) {
+    it('should write to deeply nested non existent file with create and parents flags', async function () {
       const testPath = `/foo/bar/baz/test-${hat()}`

-      ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, parents: true }, (err) => {
-        expect(err).to.not.exist()
+      await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, parents: true })

-        ipfs.files.stat(testPath, (err, stats) => {
-          expect(err).to.not.exist()
-          expect(stats.type).to.equal('file')
-          done()
-        })
-      })
+      const stats = await ipfs.files.stat(testPath)
+      expect(stats.type).to.equal('file')
     })
   })
}
diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js
index b2f35cfa..5a0765e9 100644
--- a/src/files-regular/add-from-fs.js
+++ b/src/files-regular/add-from-fs.js
@@ -13,88 +13,64 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.addFromFs', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     const fixturesPath = path.join(__dirname, '../../test/fixtures')
     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should add a directory from the file system', (done) => {
+    it('should add a directory from the file system', async () => {
       const filesPath = path.join(fixturesPath, 'test-folder')
-      ipfs.addFromFs(filesPath, { recursive: true }, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.be.above(8)
-        done()
-      })
+
+      const result = await ipfs.addFromFs(filesPath, { recursive: true })
+      expect(result.length).to.be.above(8)
     })

-    it('should add a directory from the file system with an odd name', (done) => {
+    it('should add a directory from the file system with an odd name', async () => {
       const filesPath = path.join(fixturesPath, 'weird name folder [v0]')
-      ipfs.addFromFs(filesPath, { recursive: true }, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.be.above(8)
-        done()
-      })
+
+      const result = await ipfs.addFromFs(filesPath, { recursive: true })
+      expect(result.length).to.be.above(8)
     })

-    it('should ignore a directory from the file system', (done) => {
+    it('should ignore a directory from the file system', async () => {
       const filesPath = path.join(fixturesPath, 'test-folder')
-      ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.be.below(9)
-        done()
-      })
+
+      const result = await ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] })
+      expect(result.length).to.be.below(9)
     })

-    it('should add a file from the file system', (done) => {
+    it('should add a file from the file system', async () => {
       const filePath = path.join(fixturesPath, 'testfile.txt')

-      ipfs.addFromFs(filePath, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.equal(1)
-        expect(result[0].path).to.equal('testfile.txt')
-        done()
-      })
+      const result = await ipfs.addFromFs(filePath)
+      expect(result.length).to.equal(1)
+      expect(result[0].path).to.equal('testfile.txt')
     })

-    it('should add a hidden file in a directory from the file system', (done) => {
+    it('should add a hidden file in a directory from the file system', async () => {
       const filesPath = path.join(fixturesPath, 'hidden-files-folder')
-      ipfs.addFromFs(filesPath, { recursive: true, hidden: true }, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.be.above(10)
-        expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt')
-        expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
-        done()
-      })
+
+      const result = await ipfs.addFromFs(filesPath, { recursive: true, hidden: true })
+      expect(result.length).to.be.above(10)
+      expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt')
+      expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
     })

-    it('should add a file from the file system with only-hash=true', function () {
+    it('should add a file from the file system with only-hash=true', async function () {
       this.slow(10 * 1000)

       const content = String(Math.random() + Date.now())
       const filepath = path.join(os.tmpdir(), `${content}.txt`)
       fs.writeFileSync(filepath, content)

-      return ipfs.addFromFs(filepath, { onlyHash: true })
-        .then(out => {
-          fs.unlinkSync(filepath)
-          return expectTimeout(ipfs.object.get(out[0].hash), 4000)
-        })
+      const out = await ipfs.addFromFs(filepath, { onlyHash: true })
+
+      fs.unlinkSync(filepath)
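+      // with onlyHash the block is never stored, so object.get should stall until expectTimeout fires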
+      await expectTimeout(ipfs.object.get(out[0].hash), 4000)
     })
   })
}
diff --git a/src/files-regular/add-from-stream.js b/src/files-regular/add-from-stream.js
index 96ae0938..34ee1c3c 100644
--- a/src/files-regular/add-from-stream.js
+++ b/src/files-regular/add-from-stream.js
@@ -11,28 +11,15 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.addFromStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should add from a stream', (done) => {
+    it('should add from a stream', async () => {
       const stream = new Readable({
         read () {
           this.push(fixtures.bigFile.data)
@@ -40,12 +27,9 @@ module.exports = (createCommon, options) => {
         }
       })

-      ipfs.addFromStream(stream, (err, result) => {
-        expect(err).to.not.exist()
-        expect(result.length).to.equal(1)
-        expect(result[0].hash).to.equal(fixtures.bigFile.cid)
-        done()
-      })
+      const result = await ipfs.addFromStream(stream)
+      expect(result.length).to.equal(1)
+      expect(result[0].hash).to.equal(fixtures.bigFile.cid)
     })
   })
}
diff --git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js
index 87d3e530..c57fc7e4 100644
--- a/src/files-regular/add-from-url.js
+++ b/src/files-regular/add-from-url.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'

+const pTimeout = require('p-timeout')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const parallel = require('async/parallel')
 const { echoUrl, redirectUrl } = require('../utils/echo-http-server')

 module.exports = (createCommon, options) => {
@@ -11,120 +11,98 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.addFromURL', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should add from a HTTP URL', (done) => {
+    it('should add from a HTTP URL', async () => {
       const text = `TEST${Date.now()}`
       const url = echoUrl(text)
-      parallel({
-        result: (cb) => ipfs.addFromURL(url, cb),
-        expectedResult: (cb) => ipfs.add(Buffer.from(text), cb)
-      }, (err, { result, expectedResult }) => {
-        expect(err).to.not.exist()
-        expect(result.err).to.not.exist()
-        expect(expectedResult.err).to.not.exist()
-        expect(result[0].hash).to.equal(expectedResult[0].hash)
-        expect(result[0].size).to.equal(expectedResult[0].size)
-        expect(result[0].path).to.equal(text)
-        done()
-      })
+
+      const [result, expectedResult] = await Promise.all([
+        ipfs.addFromURL(url),
+        ipfs.add(Buffer.from(text))
+      ])
+
+      expect(result.err).to.not.exist()
+      expect(expectedResult.err).to.not.exist()
+      expect(result[0].hash).to.equal(expectedResult[0].hash)
+      expect(result[0].size).to.equal(expectedResult[0].size)
+      expect(result[0].path).to.equal(text)
     })

-    it('should add from a HTTP URL with redirection', (done) => {
+    it('should add from a HTTP URL with redirection', async () => {
       const text = `TEST${Date.now()}`
       const url = echoUrl(text) + '?foo=bar#buzz'

-      parallel({
-        result: (cb) => ipfs.addFromURL(redirectUrl(url), cb),
-        expectedResult: (cb) => ipfs.add(Buffer.from(text), cb)
-      }, (err, { result, expectedResult }) => {
-        expect(err).to.not.exist()
-        expect(result.err).to.not.exist()
-        expect(expectedResult.err).to.not.exist()
-        expect(result[0].hash).to.equal(expectedResult[0].hash)
-        expect(result[0].size).to.equal(expectedResult[0].size)
-        expect(result[0].path).to.equal(text)
-        done()
-      })
+      const [result, expectedResult] = await Promise.all([
+        ipfs.addFromURL(redirectUrl(url)),
+        ipfs.add(Buffer.from(text))
+      ])
+
+      expect(result.err).to.not.exist()
+      expect(expectedResult.err).to.not.exist()
+      expect(result[0].hash).to.equal(expectedResult[0].hash)
+      expect(result[0].size).to.equal(expectedResult[0].size)
+      expect(result[0].path).to.equal(text)
     })

-    it('should add from a URL with only-hash=true', (done) => {
+    it('should add from a URL with only-hash=true', async function () {
       const text = `TEST${Date.now()}`
       const url = echoUrl(text)
-      ipfs.addFromURL(url, { onlyHash: true }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.addFromURL(url, { onlyHash: true })
+
+      try {
         // A successful object.get for this size data took my laptop ~14ms
-        let didTimeout = false
-        const timeoutId = setTimeout(() => {
-          didTimeout = true
-          done()
-        }, 500)
-
-        ipfs.object.get(res[0].hash, () => {
-          clearTimeout(timeoutId)
-          if (didTimeout) return
-          expect(new Error('did not timeout')).to.not.exist()
-        })
-      })
+        await pTimeout(ipfs.object.get(res[0].hash), 500)
+      } catch (err) {
+        if (err.name === 'TimeoutError') {
+          // This doesn't seem like the right approach: the test probably
+          // shouldn't pass just because it got a timeout error, but it is
+          // pretty much the same logic as the previous callback version
+          return Promise.resolve()
+        }
+
+        throw err
+      }
    })

-    it('should add from a URL with wrap-with-directory=true', (done) => {
+    it('should add from a URL with wrap-with-directory=true', async () => {
      const filename = `TEST${Date.now()}.txt` // also acts as data
      const url = echoUrl(filename) + '?foo=bar#buzz'
      const addOpts = { wrapWithDirectory: true }
-      parallel({
-        result: (cb) => ipfs.addFromURL(url, addOpts, cb),
-        expectedResult: (cb) => ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts, cb)
-      }, (err, { result, expectedResult }) => {
-        expect(err).to.not.exist()
-        expect(result.err).to.not.exist()
-        expect(expectedResult.err).to.not.exist()
-        expect(result).to.deep.equal(expectedResult)
-        done()
-      })
+
+      const [result, expectedResult] = await Promise.all([
+        ipfs.addFromURL(url, addOpts),
+        ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)
+      ])
+      expect(result.err).to.not.exist()
+      expect(expectedResult.err).to.not.exist()
+      expect(result).to.deep.equal(expectedResult)
    })

-    it('should add from a URL with wrap-with-directory=true and URL-escaped file name', (done) => {
+    it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => {
      const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data
      const url = echoUrl(filename) + '?foo=bar#buzz'
      const addOpts = { wrapWithDirectory: true }
-      parallel({
-        result: (cb) => ipfs.addFromURL(url, addOpts, cb),
-        expectedResult: (cb) => ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts, cb)
-      }, (err, { result, expectedResult }) => {
-        expect(err).to.not.exist()
-        expect(result.err).to.not.exist()
-        expect(expectedResult.err).to.not.exist()
-        expect(result).to.deep.equal(expectedResult)
-        done()
-      })
+
+      const [result, expectedResult] = await Promise.all([
+        ipfs.addFromURL(url, addOpts),
+        ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)
+      ])
+
+      expect(result.err).to.not.exist()
+      expect(expectedResult.err).to.not.exist()
+      expect(result).to.deep.equal(expectedResult)
    })

-    it('should not add from an invalid url', (done) => {
-      ipfs.addFromURL('123http://invalid', (err, result) => {
-        expect(err).to.exist()
-        expect(result).to.not.exist()
-        done()
-      })
+    it('should not add from an invalid url', () => {
+      return expect(ipfs.addFromURL('123http://invalid')).to.eventually.be.rejected()
    })
  })
}
diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js
index e51eb3eb..9dd75384 100644
--- a/src/files-regular/add-pull-stream.js
+++ b/src/files-regular/add-pull-stream.js
@@ -4,6 +4,7 @@
 const { fixtures } = require('./utils')
 const pull = require('pull-stream')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()

   describe('.addPullStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs
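// A minimal sketch of the timeout-as-success idea the onlyHash and addFromURL
// tests rely on: content that was never really added should never be
// retrievable, so a TimeoutError is the passing case. The helper below is an
// assumption modelled on the suites' `expectTimeout`, built from p-timeout:
const pTimeout = require('p-timeout')

async function expectTimeout (promise, ms) {
  try {
    await pTimeout(promise, ms)
  } catch (err) {
    // p-timeout rejects with a TimeoutError once `ms` milliseconds elapse
    if (err.name === 'TimeoutError') return
    throw err
  }
  throw new Error('expected promise to time out')
}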
-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    after(() => common.teardown())

-    after((done) => common.teardown(done))
-
-    it('should add pull stream of valid files and dirs', function (done) {
+    it('should add pull stream of valid files and dirs', async function () {
      const content = (name) => ({
        path: `test-folder/${name}`,
        content: fixtures.directory.files[name]
@@ -53,35 +41,21 @@ module.exports = (createCommon, options) => {
      const stream = ipfs.addPullStream()

-      pull(
-        pull.values(files),
-        stream,
-        pull.collect((err, filesAdded) => {
-          expect(err).to.not.exist()
+      const filesAdded = await pullToPromise.any(pull(pull.values(files), stream))
+      const testFolderIndex = filesAdded.length - 1

-          filesAdded.forEach((file) => {
-            if (file.path === 'test-folder') {
-              expect(file.hash).to.equal(fixtures.directory.cid)
-              done()
-            }
-          })
-        })
-      )
+      expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].path`, 'test-folder')
+      expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].hash`, fixtures.directory.cid)
    })

-    it('should add with object chunks and pull stream content', (done) => {
+    it('should add with object chunks and pull stream content', async () => {
      const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
+      const data = [{ content: pull.values([Buffer.from('test')]) }]
+      const stream = ipfs.addPullStream()

-      pull(
-        pull.values([{ content: pull.values([Buffer.from('test')]) }]),
-        ipfs.addPullStream(),
-        pull.collect((err, res) => {
-          if (err) return done(err)
-          expect(res).to.have.length(1)
-          expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-          done()
-        })
-      )
+      const res = await pullToPromise.any(pull(pull.values(data), stream))
+      expect(res).to.have.property('length', 1)
+      expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
    })
  })
}
diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js
index 29187548..749ac1e7 100644
--- a/src/files-regular/add-readable-stream.js
+++ b/src/files-regular/add-readable-stream.js
@@ -3,6 +3,7 @@
 const { fixtures } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const getStream = require('get-stream')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()

   describe('.addReadableStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should add readable stream of valid files and dirs', function (done) {
+    it('should add readable stream of valid files and dirs', async function () {
      const content = (name) => ({
        path: `test-folder/${name}`,
        content: fixtures.directory.files[name]
@@
-52,20 +40,13 @@ module.exports = (createCommon, options) => { const stream = ipfs.addReadableStream() - stream.on('error', (err) => { - expect(err).to.not.exist() - }) - - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(fixtures.directory.cid) - } - }) - - stream.on('end', done) - files.forEach((file) => stream.write(file)) stream.end() + + const filesArray = await getStream.array(stream) + const file = filesArray[filesArray.length - 1] + + expect(file.hash).to.equal(fixtures.directory.cid) }) }) } diff --git a/src/files-regular/add.js b/src/files-regular/add.js index 4d3c34d9..fdea60e4 100644 --- a/src/files-regular/add.js +++ b/src/files-regular/add.js @@ -18,36 +18,18 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should add a File', function (done) { - if (supportsFileReader) { - ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }), (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) - } else { - this.skip('skip in node') - } + after(() => common.teardown()) + + it('should add a File', async function () { + if (!supportsFileReader) return this.skip('skip in node') + + const filesAdded = await ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })) + expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a File as tuple', function (done) { + it('should add a File as tuple', async function () { if (!supportsFileReader) return this.skip('skip in node') const tuple = { @@ -55,14 +37,11 @@ module.exports = (createCommon, options) => { content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) } - ipfs.add(tuple, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) + const filesAdded = await ipfs.add(tuple) + expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a File as array of tuple', function (done) { + it('should add a File as array of tuple', async function () { if (!supportsFileReader) return this.skip('skip in node') const tuple = { @@ -70,51 +49,33 @@ module.exports = (createCommon, options) => { content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) } - ipfs.add([tuple], (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) + const filesAdded = await ipfs.add([tuple]) + expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a Buffer', (done) => { - ipfs.add(fixtures.smallFile.data, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) - // file.size counts 
the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.smallFile.data.length) - done() - }) - }) + it('should add a Buffer', async () => { + const filesAdded = await ipfs.add(fixtures.smallFile.data) + expect(filesAdded).to.have.length(1) - it('should add a Buffer (promised)', () => { - return ipfs.add(fixtures.smallFile.data) - .then((filesAdded) => { - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal(fixtures.smallFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.smallFile.data.length) }) - it('should add a BIG Buffer', (done) => { - ipfs.add(fixtures.bigFile.data, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.bigFile.data.length) - done() - }) + it('should add a BIG Buffer', async () => { + const filesAdded = await ipfs.add(fixtures.bigFile.data) + expect(filesAdded).to.have.length(1) + + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.bigFile.data.length) }) - it('should add a BIG Buffer with progress enabled', (done) => { + it('should add a BIG Buffer with progress enabled', async () => { let progCalled = false let accumProgress = 0 function handler (p) { @@ -122,89 +83,70 @@ module.exports = (createCommon, options) => { accumProgress = p } - ipfs.add(fixtures.bigFile.data, { progress: handler }, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(fixtures.bigFile.data, { progress: handler }) + expect(filesAdded).to.have.length(1) - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) - - expect(progCalled).to.be.true() - expect(accumProgress).to.equal(fixtures.bigFile.data.length) - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + expect(progCalled).to.be.true() + expect(accumProgress).to.equal(fixtures.bigFile.data.length) }) - it('should add a Buffer as tuple', (done) => { + it('should add a Buffer as tuple', async () => { const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } - ipfs.add([ - tuple - ], (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add([tuple]) + expect(filesAdded).to.have.length(1) - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal('testfile.txt') - - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal('testfile.txt') }) - it('should add a string', (done) => { + it('should add a string', async () => { const data = 'a string' const expectedCid = 'QmQFRCwEpwQZ5aQMqCsCaFbdjNLLHoyZYDjr92v1F7HeqX' - ipfs.add(data, (err, filesAdded) => { - 
expect(err).to.not.exist() + const filesAdded = await ipfs.add(data) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(16) - expect(hash).to.equal(expectedCid) - done() - }) + const { path, size, hash } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(16) + expect(hash).to.equal(expectedCid) }) - it('should add a TypedArray', (done) => { + it('should add a TypedArray', async () => { const data = Uint8Array.from([1, 3, 8]) const expectedCid = 'QmRyUEkVCuHC8eKNNJS9BDM9jqorUvnQJK1DM81hfngFqd' - ipfs.add(data, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(data) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(11) - expect(hash).to.equal(expectedCid) - done() - }) + const { path, size, hash } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(11) + expect(hash).to.equal(expectedCid) }) - it('should add readable stream', (done) => { + it('should add readable stream', async () => { const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() rs.push(Buffer.from('some data')) rs.push(null) - ipfs.add(rs, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(rs) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const file = filesAdded[0] - expect(file.path).to.equal(expectedCid) - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - done() - }) + const file = filesAdded[0] + expect(file.path).to.equal(expectedCid) + expect(file.size).to.equal(17) + expect(file.hash).to.equal(expectedCid) }) - it('should add array of objects with readable stream content', (done) => { + it('should add array of objects with readable stream content', async () => { const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() @@ -213,50 +155,32 @@ module.exports = (createCommon, options) => { const tuple = { path: 'data.txt', content: rs } - ipfs.add([tuple], (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.be.length(1) - const file = filesAdded[0] - expect(file.path).to.equal('data.txt') - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - done() - }) - }) - - it('should add pull stream', (done) => { - const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + const filesAdded = await ipfs.add([tuple]) + expect(filesAdded).to.be.length(1) - ipfs.add(pull.values([Buffer.from('test')]), (err, res) => { - if (err) return done(err) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) + const file = filesAdded[0] + expect(file.path).to.equal('data.txt') + expect(file.size).to.equal(17) + expect(file.hash).to.equal(expectedCid) }) - it('should add pull stream (promised)', () => { + it('should add pull stream', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' - return ipfs.add(pull.values([Buffer.from('test')])) - .then((res) => { - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - }) + const res = await ipfs.add(pull.values([Buffer.from('test')])) + expect(res).to.have.length(1) + 
      expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
    })

-    it('should add array of objects with pull stream content (promised)', () => {
+    it('should add array of objects with pull stream content', async () => {
      const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'

-      return ipfs.add([{ content: pull.values([Buffer.from('test')]) }])
-        .then((res) => {
-          expect(res).to.have.length(1)
-          expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-        })
+      const res = await ipfs.add([{ content: pull.values([Buffer.from('test')]) }])
+      expect(res).to.have.length(1)
+      expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
    })

-    it('should add a nested directory as array of tupples', function (done) {
+    it('should add a nested directory as array of tuples', async function () {
      const content = (name) => ({
        path: `test-folder/${name}`,
        content: fixtures.directory.files[name]
@@ -275,17 +199,14 @@ module.exports = (createCommon, options) => {
        emptyDir('files/empty')
      ]

-      ipfs.add(dirs, (err, res) => {
-        expect(err).to.not.exist()
-        const root = res[res.length - 1]
+      const res = await ipfs.add(dirs)

-        expect(root.path).to.equal('test-folder')
-        expect(root.hash).to.equal(fixtures.directory.cid)
-        done()
-      })
+      const root = res[res.length - 1]
+      expect(root.path).to.equal('test-folder')
+      expect(root.hash).to.equal(fixtures.directory.cid)
    })

-    it('should add a nested directory as array of tupples with progress', function (done) {
+    it('should add a nested directory as array of tuples with progress', async function () {
      const content = (name) => ({
        path: `test-folder/${name}`,
        content: fixtures.directory.files[name]
@@ -315,19 +236,16 @@ module.exports = (createCommon, options) => {
        accumProgress += p
      }

-      ipfs.add(dirs, { progress: handler }, (err, filesAdded) => {
-        expect(err).to.not.exist()
-        const root = filesAdded[filesAdded.length - 1]
+      const filesAdded = await ipfs.add(dirs, { progress: handler })

-        expect(progCalled).to.be.true()
-        expect(accumProgress).to.be.at.least(total)
-        expect(root.path).to.equal('test-folder')
-        expect(root.hash).to.equal(fixtures.directory.cid)
-        done()
-      })
+      const root = filesAdded[filesAdded.length - 1]
+      expect(progCalled).to.be.true()
+      expect(accumProgress).to.be.at.least(total)
+      expect(root.path).to.equal('test-folder')
+      expect(root.hash).to.equal(fixtures.directory.cid)
    })

-    it('should add files to a directory non sequentially', function (done) {
+    it('should add files to a directory non sequentially', async function () {
      const content = path => ({
        path: `test-dir/${path}`,
        content: fixtures.directory.files[path.split('/').pop()]
@@ -340,55 +258,42 @@ module.exports = (createCommon, options) => {
        content('a/alice.txt')
      ]

-      ipfs.add(input, (err, filesAdded) => {
-        expect(err).to.not.exist()
-
-        const toPath = ({ path }) => path
-        const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/'))
-        const filesAddedPaths = filesAdded.map(toPath)
+      const filesAdded = await ipfs.add(input)

-        expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p)))
-          .to.be.true()
+      const toPath = ({ path }) => path
+      const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/'))
+      const filesAddedPaths = filesAdded.map(toPath)

-        done()
-      })
+      expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))).to.be.true()
    })
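// A minimal sketch of the rejection assertion these suites use in place of
// callback error checks; with chai-as-promised plus dirty-chai, `rejected` is
// invoked as a function and the whole assertion is a promise the test returns
// (the `138` input mirrors the invalid-input test below):
const { expect } = require('../utils/mocha')

function demoRejection (ipfs) {
  // resolves if ipfs.add rejects, fails the test otherwise
  return expect(ipfs.add(138)).to.eventually.be.rejected()
}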
-    it('should fail when passed invalid input', (done) => {
+    it('should fail when passed invalid input', () => {
      const nonValid = 138

-      ipfs.add(nonValid, (err, result) => {
-        expect(err).to.exist()
-        done()
-      })
+      return expect(ipfs.add(nonValid)).to.eventually.be.rejected()
    })

-    it('should wrap content in a directory', (done) => {
+    it('should wrap content in a directory', async () => {
      const data = { path: 'testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add(data, { wrapWithDirectory: true }, (err, filesAdded) => {
-        expect(err).to.not.exist()
-        expect(filesAdded).to.have.length(2)
-        const file = filesAdded[0]
-        const wrapped = filesAdded[1]
-        expect(file.hash).to.equal(fixtures.smallFile.cid)
-        expect(file.path).to.equal('testfile.txt')
-        expect(wrapped.path).to.equal('')
-        done()
-      })
+      const filesAdded = await ipfs.add(data, { wrapWithDirectory: true })
+      expect(filesAdded).to.have.length(2)
+
+      const file = filesAdded[0]
+      const wrapped = filesAdded[1]
+      expect(file.hash).to.equal(fixtures.smallFile.cid)
+      expect(file.path).to.equal('testfile.txt')
+      expect(wrapped.path).to.equal('')
    })

-    it('should add with only-hash=true (promised)', function () {
+    it('should add with only-hash=true', async function () {
      this.slow(10 * 1000)
      const content = String(Math.random() + Date.now())

-      return ipfs.add(Buffer.from(content), { onlyHash: true })
-        .then(files => {
-          expect(files).to.have.length(1)
+      const files = await ipfs.add(Buffer.from(content), { onlyHash: true })
+      expect(files).to.have.length(1)

-          // 'ipfs.object.get()' should timeout because content wasn't actually added
-          return expectTimeout(ipfs.object.get(files[0].hash), 4000)
-        })
+      // 'ipfs.object.get()' should timeout because content wasn't actually added
+      await expectTimeout(ipfs.object.get(files[0].hash), 4000)
    })
  })
}
diff --git a/src/files-regular/cat-pull-stream.js b/src/files-regular/cat-pull-stream.js
index 37fd3da0..28aa765e 100644
--- a/src/files-regular/cat-pull-stream.js
+++ b/src/files-regular/cat-pull-stream.js
@@ -2,8 +2,8 @@
 'use strict'

 const { fixtures } = require('./utils')
-const pull = require('pull-stream')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()

   describe('.catPullStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => { ipfs = await common.setup() })

-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    before((done) => ipfs.add(fixtures.smallFile.data, done))
+    before(() => ipfs.add(fixtures.smallFile.data))
+    after(() => common.teardown())

-    after((done) => common.teardown(done))
-
-    it('should return a Pull Stream for a CID', (done) => {
+    it('should return a Pull Stream for a CID', async () => {
      const stream = ipfs.catPullStream(fixtures.smallFile.cid)

-      pull(
-        stream,
-        pull.concat((err, data) => {
-          expect(err).to.not.exist()
-          expect(data.length).to.equal(fixtures.smallFile.data.length)
-          expect(data).to.eql(fixtures.smallFile.data.toString())
-          done()
-        })
-      )
+      const data = Buffer.concat(await pullToPromise.any(stream))
+
+      expect(data.length).to.equal(fixtures.smallFile.data.length)
+      expect(data.toString()).to.deep.equal(fixtures.smallFile.data.toString())
    })

-    it('should export a chunk of a file in a Pull Stream', (done)
=> { + it('should export a chunk of a file in a Pull Stream', async () => { const offset = 1 const length = 3 @@ -57,14 +38,8 @@ module.exports = (createCommon, options) => { length }) - pull( - stream, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - }) - ) + const data = Buffer.concat(await pullToPromise.any(stream)) + expect(data.toString()).to.equal('lz ') }) }) } diff --git a/src/files-regular/cat-readable-stream.js b/src/files-regular/cat-readable-stream.js index 29885fa9..16f3e98d 100644 --- a/src/files-regular/cat-readable-stream.js +++ b/src/files-regular/cat-readable-stream.js @@ -2,8 +2,8 @@ 'use strict' const { fixtures } = require('./utils') -const bl = require('bl') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -11,41 +11,26 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.catReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.bigFile.data) + await ipfs.add(fixtures.smallFile.data) }) - before((done) => ipfs.add(fixtures.bigFile.data, done)) - before((done) => ipfs.add(fixtures.smallFile.data, done)) - - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should return a Readable Stream for a CID', (done) => { + it('should return a Readable Stream for a CID', async () => { const stream = ipfs.catReadableStream(fixtures.bigFile.cid) + const data = await getStream.buffer(stream) - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data).to.eql(fixtures.bigFile.data) - done() - })) + expect(data).to.eql(fixtures.bigFile.data) }) - it('should export a chunk of a file in a Readable Stream', (done) => { + it('should export a chunk of a file in a Readable Stream', async () => { const offset = 1 const length = 3 @@ -54,11 +39,8 @@ module.exports = (createCommon, options) => { length }) - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - })) + const data = await getStream.buffer(stream) + expect(data.toString()).to.equal('lz ') }) }) } diff --git a/src/files-regular/cat.js b/src/files-regular/cat.js index 0710e22a..2681e689 100644 --- a/src/files-regular/cat.js +++ b/src/files-regular/cat.js @@ -3,7 +3,6 @@ const { fixtures } = require('./utils') const bs58 = require('bs58') -const parallel = require('async/parallel') const CID = require('cids') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -13,217 +12,141 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.cat', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = 
node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - before((done) => { - parallel([ - (cb) => ipfs.add(fixtures.smallFile.data, cb), - (cb) => ipfs.add(fixtures.bigFile.data, cb) - ], done) - }) + before(() => Promise.all([ + ipfs.add(fixtures.smallFile.data), + ipfs.add(fixtures.bigFile.data) + ])) - it('should cat with a base58 string encoded multihash', (done) => { - ipfs.cat(fixtures.smallFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) + it('should cat with a base58 string encoded multihash', async () => { + const data = await ipfs.cat(fixtures.smallFile.cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat with a base58 string encoded multihash (promised)', () => { - return ipfs.cat(fixtures.smallFile.cid) - .then((data) => { - expect(data.toString()).to.contain('Plz add me!') - }) - }) - - it('should cat with a Buffer multihash', (done) => { + it('should cat with a Buffer multihash', async () => { const cid = Buffer.from(bs58.decode(fixtures.smallFile.cid)) - ipfs.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) + const data = await ipfs.cat(cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat with a CID object', (done) => { + it('should cat with a CID object', async () => { const cid = new CID(fixtures.smallFile.cid) - ipfs.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) + const data = await ipfs.cat(cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat a file added as CIDv0 with a CIDv1', done => { + it('should cat a file added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 0 }) - const cidv0 = new CID(res[0].hash) - expect(cidv0.version).to.equal(0) + const cidv0 = new CID(res[0].hash) + expect(cidv0.version).to.equal(0) - const cidv1 = cidv0.toV1() + const cidv1 = cidv0.toV1() - ipfs.cat(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output).to.eql(input) - done() - }) - }) + const output = await ipfs.cat(cidv1) + expect(output).to.eql(input) }) - it('should cat a file added as CIDv1 with a CIDv0', done => { + it('should cat a file added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) - const cidv1 = new CID(res[0].hash) - expect(cidv1.version).to.equal(1) + const cidv1 = new CID(res[0].hash) + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.cat(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output).to.eql(input) - done() - }) - }) + const output = await ipfs.cat(cidv0) + expect(output).to.eql(input) }) - it('should cat a BIG file', (done) => { - ipfs.cat(fixtures.bigFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.length).to.equal(fixtures.bigFile.data.length) - expect(data).to.eql(fixtures.bigFile.data) - done() - }) + it('should cat a BIG file', async () => { + const data = await ipfs.cat(fixtures.bigFile.cid) + 
      expect(data.length).to.equal(fixtures.bigFile.data.length)
+      expect(data).to.eql(fixtures.bigFile.data)
    })

-    it('should cat with IPFS path', (done) => {
+    it('should cat with IPFS path', async () => {
      const ipfsPath = '/ipfs/' + fixtures.smallFile.cid

-      ipfs.cat(ipfsPath, (err, data) => {
-        expect(err).to.not.exist()
-        expect(data.toString()).to.contain('Plz add me!')
-        done()
-      })
+      const data = await ipfs.cat(ipfsPath)
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should cat with IPFS path, nested value', (done) => {
-      const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }
+    it('should cat with IPFS path, nested value', async () => {
+      const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data }
+
+      const filesAdded = await ipfs.add([fileToAdd])

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
+      // Array#find is synchronous, so there is nothing to await here
+      const file = filesAdded.find((f) => f.path === 'a')
+      expect(file).to.exist()

-        const file = filesAdded.find((f) => f.path === 'a')
-        expect(file).to.exist()
+      const data = await ipfs.cat(`/ipfs/${file.hash}/testfile.txt`)

-        ipfs.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => {
-          expect(err).to.not.exist()
-          expect(data.toString()).to.contain('Plz add me!')
-          done()
-        })
-      })
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should cat with IPFS path, deeply nested value', (done) => {
-      const file = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data }
+    it('should cat with IPFS path, deeply nested value', async () => {
+      const fileToAdd = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
+      const filesAdded = await ipfs.add([fileToAdd])

-        const file = filesAdded.find((f) => f.path === 'a')
-        expect(file).to.exist()
+      const file = filesAdded.find((f) => f.path === 'a')
+      expect(file).to.exist()

-        ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`, (err, data) => {
-          expect(err).to.not.exist()
-          expect(data.toString()).to.contain('Plz add me!')
-          done()
-        })
-      })
+      const data = await ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`)
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should error on invalid key (promised)', () => {
+    it('should error on invalid key', () => {
      const invalidCid = 'somethingNotMultihash'

-      return ipfs.cat(invalidCid)
-        .catch((err) => {
-          expect(err).to.exist()
-
-          const errString = err.toString()
-          if (errString === 'Error: invalid ipfs ref path') {
-            expect(err.toString()).to.contain('Error: invalid ipfs ref path')
-          }
-
-          if (errString === 'Error: Invalid Key') {
-            expect(err.toString()).to.contain('Error: Invalid Key')
-          }
-        })
+      return expect(ipfs.cat(invalidCid)).to.eventually.be.rejected()
    })

-    it('should error on unknown path (promised)', () => {
-      return ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')
-        .catch((err) => {
-          expect(err).to.exist()
-          expect(err.message).to.be.oneOf([
-            'file does not exist',
-            'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'])
-        })
+    it('should error on unknown path', () => {
+      return expect(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')).to.eventually.be.rejected()
+        .and.be.an.instanceOf(Error)
+        .and.to.have.property('message')
+        .to.be.oneOf([
+          'file does not exist',
+          'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
+        ])
    })
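// A minimal sketch of capturing the rejection reason itself: awaiting the
// rejected() assertion resolves to the error, which the dir-path test below
// relies on (the message string here is the one that test expects):
const { expect } = require('../utils/mocha')

async function demoRejectionReason (ipfs, dirCid) {
  const err = await expect(ipfs.cat(dirCid)).to.be.rejected()
  // `err` is the rejection reason, so further assertions can run on it
  expect(err.message).to.contain('this dag node is a directory')
}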
-    it('should error on dir path (promised)', () => {
+    it('should error on dir path', async () => {
      const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data }

-      return ipfs.add([file])
-        .then((filesAdded) => {
-          expect(filesAdded.length).to.equal(2)
-          const files = filesAdded.filter((file) => file.path === 'dir')
-          expect(files.length).to.equal(1)
-          const dir = files[0]
-          return ipfs.cat(dir.hash)
-            .catch((err) => {
-              expect(err).to.exist()
-              expect(err.message).to.contain('this dag node is a directory')
-            })
-        })
+      const filesAdded = await ipfs.add([file])
+      expect(filesAdded.length).to.equal(2)
+
+      const files = filesAdded.filter((file) => file.path === 'dir')
+      expect(files.length).to.equal(1)
+
+      const dir = files[0]
+
+      const err = await expect(ipfs.cat(dir.hash)).to.be.rejected()
+      expect(err.message).to.contain('this dag node is a directory')
    })

-    it('should export a chunk of a file', (done) => {
+    it('should export a chunk of a file', async () => {
      const offset = 1
      const length = 3

-      ipfs.cat(fixtures.smallFile.cid, {
-        offset,
-        length
-      }, (err, data) => {
-        expect(err).to.not.exist()
-        expect(data.toString()).to.equal('lz ')
-        done()
-      })
+      const data = await ipfs.cat(fixtures.smallFile.cid, { offset, length })
+      expect(data.toString()).to.equal('lz ')
    })
  })
}
diff --git a/src/files-regular/get-pull-stream.js b/src/files-regular/get-pull-stream.js
index 17855043..03f9c379 100644
--- a/src/files-regular/get-pull-stream.js
+++ b/src/files-regular/get-pull-stream.js
@@ -2,8 +2,8 @@
 'use strict'

 const { fixtures } = require('./utils')
-const pull = require('pull-stream')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()

   describe('.getPullStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
+    before(async () => { ipfs = await common.setup() })

-    before((done) => ipfs.add(fixtures.smallFile.data, done))
+    before(() => ipfs.add(fixtures.smallFile.data))

-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should return a Pull Stream of Pull Streams', (done) => {
+    it('should return a Pull Stream of Pull Streams', async () => {
      const stream = ipfs.getPullStream(fixtures.smallFile.cid)

-      pull(
-        stream,
-        pull.collect((err, files) => {
-          expect(err).to.not.exist()
-          expect(files).to.be.length(1)
-          expect(files[0].path).to.eql(fixtures.smallFile.cid)
-          pull(
-            files[0].content,
-            pull.concat((err, data) => {
-              expect(err).to.not.exist()
-              expect(data.toString()).to.contain('Plz add me!')
-              done()
-            })
-          )
-        })
-      )
+      const files = await pullToPromise.any(stream)
+
+      const data = Buffer.concat(await pullToPromise.any(files[0].content))
+      expect(data.toString()).to.contain('Plz add me!')
    })
  })
}
diff --git a/src/files-regular/get-readable-stream.js b/src/files-regular/get-readable-stream.js
index 98bfb351..e8244247 100644
--- a/src/files-regular/get-readable-stream.js
+++ b/src/files-regular/get-readable-stream.js
@@ -2,9 +2,9 @@
 'use strict'

 const { fixtures } = require('./utils')
-const concat = require('concat-stream')
 const through = require('through2')
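// A minimal sketch of the pull-to-promise conversion used throughout these
// suites: a pull-stream source goes in, a promise for the collected values
// comes out (assuming `pullToPromise.any` gathers every emitted value, which
// is how the tests here use it):
const pull = require('pull-stream')
const pullToPromise = require('pull-to-promise')

async function collectPullStream () {
  const values = await pullToPromise.any(pull.values(['a', 'b', 'c']))
  return values // ['a', 'b', 'c']
}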
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const getStream = require('get-stream')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()

   describe('.getReadableStream', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
+    before(async () => {
+      ipfs = await common.setup()
+      await ipfs.add(fixtures.smallFile.data)
    })

-    before((done) => ipfs.add(fixtures.smallFile.data, done))
-
-    after((done) => common.teardown(done))
+    after(() => common.teardown())

-    it('should return a Readable Stream of Readable Streams', (done) => {
+    it('should return a Readable Stream of Readable Streams', async () => {
      const stream = ipfs.getReadableStream(fixtures.smallFile.cid)

-      const files = []
-      stream.pipe(through.obj((file, enc, next) => {
-        file.content.pipe(concat((content) => {
-          files.push({ path: file.path, content: content })
+      // I was not able to use the 'get-stream' module here because it
+      // exceeds the test timeout. I think it might be related to the
+      // 'pump' module that get-stream uses
+      const files = await new Promise((resolve, reject) => {
+        const filesArr = []
+        stream.pipe(through.obj(async (file, enc, next) => {
+          const content = await getStream.buffer(file.content)
+          filesArr.push({ path: file.path, content: content })
          next()
-        }))
-      }, () => {
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString()).to.contain('Plz add me!')
-        done()
-      }))
+        }, () => resolve(filesArr)))
+      })
+
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString()).to.contain('Plz add me!')
    })
  })
}
diff --git a/src/files-regular/get.js b/src/files-regular/get.js
index 334a0c34..2bfe5d69 100644
--- a/src/files-regular/get.js
+++ b/src/files-regular/get.js
@@ -3,8 +3,6 @@
 const { fixtures } = require('./utils')
 const bs58 = require('bs58')
-const parallel = require('async/parallel')
-const series = require('async/series')
 const CID = require('cids')

 const { getDescribe, getIt, expect } = require('../utils/mocha')

@@ -14,250 +12,184 @@ module.exports = (createCommon, options) => {
   const common = createCommon()

   describe('.get', function () {
-    this.timeout(40 * 1000)
+    this.timeout(60 * 1000)

     let ipfs

-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
-    })
-
-    before((done) => {
-      parallel([
-        (cb) => ipfs.add(fixtures.smallFile.data, cb),
-        (cb) => ipfs.add(fixtures.bigFile.data, cb)
-      ], done)
+    before(async () => {
+      ipfs = await common.setup()
+      await ipfs.add(fixtures.smallFile.data)
+      await ipfs.add(fixtures.bigFile.data)
    })

-    after((done) => common.teardown(done))
+    after(() => common.teardown())
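// A minimal sketch of the get-stream collectors these suites switch to for
// Node readable streams: `buffer` concatenates a byte stream into a single
// Buffer, while `array` collects an object-mode stream into an array:
const getStream = require('get-stream')

async function collectReadable (byteStream, objectStream) {
  const data = await getStream.buffer(byteStream) // Buffer of all bytes
  const items = await getStream.array(objectStream) // array of emitted objects
  return { data, items }
}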
-    it('should get with a base58 encoded multihash', (done) => {
-      ipfs.get(fixtures.smallFile.cid, (err, files) => {
-        expect(err).to.not.exist()
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-        done()
-      })
-    })
-
-    it('should get with a base58 encoded multihash (promised)', () => {
-      return ipfs.get(fixtures.smallFile.cid)
-        .then((files) => {
-          expect(files).to.be.length(1)
-          expect(files[0].path).to.equal(fixtures.smallFile.cid)
-          expect(files[0].content.toString()).to.contain('Plz add me!')
-        })
+    it('should get with a base58 encoded multihash', async () => {
+      const files = await ipfs.get(fixtures.smallFile.cid)
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should get with a Buffer multihash', (done) => {
+    it('should get with a Buffer multihash', async () => {
      const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid))

-      ipfs.get(cidBuf, (err, files) => {
-        expect(err).to.not.exist()
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-        done()
-      })
+      const files = await ipfs.get(cidBuf)
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should get a file added as CIDv0 with a CIDv1', done => {
+    it('should get a file added as CIDv0 with a CIDv1', async () => {
      const input = Buffer.from(`TEST${Date.now()}`)

-      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.add(input, { cidVersion: 0 })

-        const cidv0 = new CID(res[0].hash)
-        expect(cidv0.version).to.equal(0)
+      const cidv0 = new CID(res[0].hash)
+      expect(cidv0.version).to.equal(0)

-        const cidv1 = cidv0.toV1()
+      const cidv1 = cidv0.toV1()

-        ipfs.get(cidv1, (err, output) => {
-          expect(err).to.not.exist()
-          expect(output[0].content).to.eql(input)
-          done()
-        })
-      })
+      const output = await ipfs.get(cidv1)
+      expect(output[0].content).to.eql(input)
    })

-    it('should get a file added as CIDv1 with a CIDv0', done => {
+    it('should get a file added as CIDv1 with a CIDv0', async () => {
      const input = Buffer.from(`TEST${Date.now()}`)

-      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })

-        const cidv1 = new CID(res[0].hash)
-        expect(cidv1.version).to.equal(1)
+      const cidv1 = new CID(res[0].hash)
+      expect(cidv1.version).to.equal(1)

-        const cidv0 = cidv1.toV0()
+      const cidv0 = cidv1.toV0()

-        ipfs.get(cidv0, (err, output) => {
-          expect(err).to.not.exist()
-          expect(output[0].content).to.eql(input)
-          done()
-        })
-      })
+      const output = await ipfs.get(cidv0)
+      expect(output[0].content).to.eql(input)
    })

-    it('should get a BIG file', (done) => {
-      ipfs.get(fixtures.bigFile.cid, (err, files) => {
-        expect(err).to.not.exist()
+    it('should get a BIG file', async () => {
+      const files = await ipfs.get(fixtures.bigFile.cid)
+      expect(files.length).to.equal(1)
+      expect(files[0].path).to.equal(fixtures.bigFile.cid)
+      expect(files[0].content.length).to.eql(fixtures.bigFile.data.length)
+      expect(files[0].content).to.eql(fixtures.bigFile.data)
+    })

-        expect(files.length).to.equal(1)
-        expect(files[0].path).to.equal(fixtures.bigFile.cid)
-        expect(files[0].content.length).to.eql(fixtures.bigFile.data.length)
-        expect(files[0].content).to.eql(fixtures.bigFile.data)
-        done()
-      })
-    })

+    it('should get a
directory', async function () { + const content = (name) => ({ + path: `test-folder/${name}`, + content: fixtures.directory.files[name] }) - }) - it('should get a directory', function (done) { - series([ - (cb) => { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - cb() - }) - }, - (cb) => { - ipfs.get(fixtures.directory.cid, (err, files) => { - expect(err).to.not.exist() - - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) - - // Check paths - const paths = files.map((file) => { return file.path }) - expect(paths).to.include.members([ - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' - ]) - - // Check contents - const contents = files.map((file) => { - return file.content - ? 
file.content.toString() - : null - }) - - expect(contents).to.include.members([ - fixtures.directory.files['alice.txt'].toString(), - fixtures.directory.files['files/hello.txt'].toString(), - fixtures.directory.files['files/ipfs.txt'].toString(), - fixtures.directory.files['holmes.txt'].toString(), - fixtures.directory.files['jungle.txt'].toString(), - fixtures.directory.files['pp.txt'].toString() - ]) - cb() - }) - } - ], done) + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const res = await ipfs.add(dirs) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) + + let files = await ipfs.get(fixtures.directory.cid) + + files = files.sort((a, b) => { + if (a.path > b.path) return 1 + if (a.path < b.path) return -1 + return 0 + }) + + // Check paths + const paths = files.map((file) => { return file.path }) + expect(paths).to.include.members([ + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' + ]) + + // Check contents + const contents = files.map((file) => { + return file.content + ? 
file.content.toString()
+          : null
+      })
+
+      expect(contents).to.include.members([
+        fixtures.directory.files['alice.txt'].toString(),
+        fixtures.directory.files['files/hello.txt'].toString(),
+        fixtures.directory.files['files/ipfs.txt'].toString(),
+        fixtures.directory.files['holmes.txt'].toString(),
+        fixtures.directory.files['jungle.txt'].toString(),
+        fixtures.directory.files['pp.txt'].toString()
+      ])
    })

-    it('should get with ipfs path, as object and nested value', (done) => {
+    it('should get with ipfs path, as object and nested value', async () => {
      const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add(file, (err, filesAdded) => {
-        expect(err).to.not.exist()
-
-        filesAdded.forEach((file) => {
-          if (file.path === 'a') {
-            ipfs.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => {
-              expect(err).to.not.exist()
-              expect(files).to.be.length(1)
-              expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-              done()
-            })
-          }
-        })
+      const filesAdded = await ipfs.add(file)
+
+      // use for...of rather than forEach here: forEach does not await an
+      // async callback, so the assertions could fire after the test ends
+      for (const file of filesAdded) {
+        if (file.path === 'a') {
+          const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+        }
+      }
    })

-    it('should get with ipfs path, as array and nested value', (done) => {
+    it('should get with ipfs path, as array and nested value', async () => {
      const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
-
-        filesAdded.forEach((file) => {
-          if (file.path === 'a') {
-            ipfs.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => {
-              expect(err).to.not.exist()
-              expect(files).to.be.length(1)
-              expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-              done()
-            })
-          }
-        })
+      const filesAdded = await ipfs.add([file])
+
+      for (const file of filesAdded) {
+        if (file.path === 'a') {
+          const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+        }
+      }
    })

-    it('should error on invalid key', () => {
+    it('should error on invalid key', async () => {
      const invalidCid = 'somethingNotMultihash'

-      return ipfs.get(invalidCid)
-        .catch((err) => {
-          expect(err).to.exist()
-          const errString = err.toString()
-          if (errString === 'Error: invalid ipfs ref path') {
-            expect(err.toString()).to.contain('Error: invalid ipfs ref path')
-          }
-          if (errString === 'Error: Invalid Key') {
-            expect(err.toString()).to.contain('Error: Invalid Key')
-          }
-        })
+      const err = await expect(ipfs.get(invalidCid)).to.be.rejected()
+
+      switch (err.toString()) {
+        case 'Error: invalid ipfs ref path':
+          expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+          break
+        case 'Error: Invalid Key':
+          expect(err.toString()).to.contain('Error: Invalid Key')
+          break
+        default:
+          break
+      }
    })
  })
}
diff --git a/src/files-regular/ls-pull-stream.js b/src/files-regular/ls-pull-stream.js
index 7ac99a24..963b2da1 100644
--- a/src/files-regular/ls-pull-stream.js
+++ b/src/files-regular/ls-pull-stream.js
@@ -2,8 +2,8 @@
 'use strict'

 const { fixtures } = require('./utils')
-const pull = require('pull-stream')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common =
createCommon() describe('.lsPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should pull stream ls with a base58 encoded CID', function (done) { + it('should pull stream ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -51,75 +40,66 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + const res = await ipfs.add(dirs) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsPullStream(cid) + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsPullStream(cid) - pull( - stream, - pull.collect((err, files) => { - expect(err).to.not.exist() - - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - }) - ) - }) + const files = await pullToPromise.any(stream) + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 
size: 581878, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' + }, + { + depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2294, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' + }, + { + depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4540, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' + } + ]) }) }) } diff --git a/src/files-regular/ls-readable-stream.js b/src/files-regular/ls-readable-stream.js index bf70728e..f3d0cbbc 100644 --- a/src/files-regular/ls-readable-stream.js +++ b/src/files-regular/ls-readable-stream.js @@ -2,8 +2,8 @@ 'use strict' const { fixtures } = require('./utils') -const concat = require('concat-stream') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -11,28 +11,17 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.lsReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should readable stream ls with a base58 encoded CID', function (done) { + it('should readable stream ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -51,70 +40,66 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + const res = await ipfs.add(dirs) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsReadableStream(cid) + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsReadableStream(cid) - stream.pipe(concat((files) => { - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 
2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - })) - }) + const files = await getStream.array(stream) + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 581878, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' + }, + { + depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2294, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' + }, + { + depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4540, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' + } + ]) }) }) } diff --git a/src/files-regular/ls.js b/src/files-regular/ls.js index 6d528d68..3dc3b73e 100644 --- a/src/files-regular/ls.js +++ b/src/files-regular/ls.js @@ -14,28 +14,17 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.ls', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should ls with a base58 encoded CID', function (done) { + it('should ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -54,73 +43,68 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(cid, (err, files) => { - expect(err).to.not.exist() - - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 
'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - }) - }) + const res = await ipfs.add(dirs) + + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) + + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const files = await ipfs.ls(cid) + + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 581878, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' + }, + { + depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2294, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' + }, + { + depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4540, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' + } + ]) }) - it('should ls files added as CIDv0 with a CIDv1', done => { + it('should ls files added as CIDv0 with a CIDv1', async () => { const dir = randomName('DIR') const input = [ @@ -128,26 +112,22 @@ module.exports = (createCommon, options) => { { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) } ] - ipfs.add(input, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 0 }) - const cidv0 = new CID(res[res.length - 1].hash) - expect(cidv0.version).to.equal(0) + const cidv0 = new CID(res[res.length - 1].hash) + expect(cidv0.version).to.equal(0) - const cidv1 = cidv0.toV1() + const cidv1 = cidv0.toV1() - ipfs.ls(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output.length).to.equal(input.length) - output.forEach(({ hash }) => { - expect(res.find(file => file.hash === hash)).to.exist() - }) - done() - }) + const output = await ipfs.ls(cidv1) + expect(output.length).to.equal(input.length) + + output.forEach(({ hash }) => { + expect(res.find(file => file.hash === hash)).to.exist() }) }) - it('should ls files added as CIDv1 with a CIDv0', done => { + it('should ls files added as CIDv1 with a CIDv0', async () => { const dir = randomName('DIR') const input = [ @@ -155,42 +135,30 @@ module.exports = (createCommon, options) => { { path: `${dir}/${randomName('F1')}`, content: 
Buffer.from(randomName('D1')) } ] - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) + + const cidv1 = new CID(res[res.length - 1].hash) + expect(cidv1.version).to.equal(1) - const cidv1 = new CID(res[res.length - 1].hash) - expect(cidv1.version).to.equal(1) + const cidv0 = cidv1.toV0() // toV0(), not toV1(): the test resolves the content by its CIDv0 - const cidv0 = cidv1.toV1() + const output = await ipfs.ls(cidv0) + expect(output.length).to.equal(input.length) - ipfs.ls(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output.length).to.equal(input.length) - output.forEach(({ hash }) => { - expect(res.find(file => file.hash === hash)).to.exist() - }) - done() - }) + output.forEach(({ hash }) => { + expect(res.find(file => file.hash === hash)).to.exist() }) }) - it('should correctly handle a non existing hash', (done) => { - ipfs.ls('surelynotavalidhashheh?', (err, res) => { - expect(err).to.exist() - expect(res).to.not.exist() - done() - }) + it('should correctly handle a non existing hash', () => { + return expect(ipfs.ls('surelynotavalidhashheh?')).to.eventually.be.rejected() }) - it('should correctly handle a non exiting path', (done) => { - ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => { - expect(err).to.exist() - expect(res).to.not.exist() - done() - }) + it('should correctly handle a non existing path', () => { + return expect(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there')).to.eventually.be.rejected() }) - it('should ls files by path', done => { + it('should ls files by path', async () => { const dir = randomName('DIR') const input = [ @@ -198,17 +166,12 @@ module.exports = (createCommon, options) => { { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) } ] - ipfs.add(input, (err, res) => { - expect(err).to.not.exist() - - ipfs.ls(`/ipfs/${res[res.length - 1].hash}`, (err, output) => { - expect(err).to.not.exist() - expect(output.length).to.equal(input.length) - output.forEach(({ hash }) => { - expect(res.find(file => file.hash === hash)).to.exist() - }) - done() - }) + const res = await ipfs.add(input) + const output = await ipfs.ls(`/ipfs/${res[res.length - 1].hash}`) + expect(output.length).to.equal(input.length) + + output.forEach(({ hash }) => { + expect(res.find(file => file.hash === hash)).to.exist() }) }) }) diff --git a/src/files-regular/refs-local-pull-stream.js b/src/files-regular/refs-local-pull-stream.js index 0f2b6977..88774247 100644 --- a/src/files-regular/refs-local-pull-stream.js +++ b/src/files-regular/refs-local-pull-stream.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const ipfsRefsLocal = (ipfs) => { - return (cb) => { - const stream = ipfs.refs.localPullStream() - pull(stream, pull.collect(cb)) - } + const stream = ipfs.refs.localPullStream() + + return pullToPromise.any(stream) } require('./refs-local-tests')(createCommon, '.refs.localPullStream', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-local-readable-stream.js b/src/files-regular/refs-local-readable-stream.js index 9b1fbec7..236961d1 100644 --- a/src/files-regular/refs-local-readable-stream.js +++ b/src/files-regular/refs-local-readable-stream.js @@ -1,15 +1,12 @@ /* eslint-env mocha */ 'use strict' -const concat = require('concat-stream') +const getStream = require('get-stream') module.exports = 
(createCommon, options) => { const ipfsRefsLocal = (ipfs) => { - return (cb) => { - const stream = ipfs.refs.localReadableStream() - stream.on('error', cb) - stream.pipe(concat((refs) => cb(null, refs))) - } + const stream = ipfs.refs.localReadableStream() + return getStream.array(stream) } require('./refs-local-tests')(createCommon, '.refs.localReadableStream', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-local-tests.js b/src/files-regular/refs-local-tests.js index af6f7fcb..78c8e670 100644 --- a/src/files-regular/refs-local-tests.js +++ b/src/files-regular/refs-local-tests.js @@ -10,28 +10,17 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => { const common = createCommon() describe(suiteName, function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get local refs', function (done) { + it('should get local refs', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -42,19 +31,13 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => { content('holmes.txt') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - - ipfsRefsLocal(ipfs)((err, refs) => { - expect(err).to.not.exist() + await ipfs.add(dirs) - const cids = refs.map(r => r.ref) - expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn') - expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr') + const refs = await ipfsRefsLocal(ipfs) - done() - }) - }) + const cids = refs.map(r => r.ref) + expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn') + expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr') }) }) } diff --git a/src/files-regular/refs-local.js b/src/files-regular/refs-local.js index d3f0b815..53737e5d 100644 --- a/src/files-regular/refs-local.js +++ b/src/files-regular/refs-local.js @@ -2,6 +2,6 @@ 'use strict' module.exports = (createCommon, options) => { - const ipfsRefsLocal = (ipfs) => (cb) => ipfs.refs.local(cb) + const ipfsRefsLocal = (ipfs) => ipfs.refs.local() require('./refs-local-tests')(createCommon, '.refs.local', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-pull-stream.js b/src/files-regular/refs-pull-stream.js index d2602737..51885754 100644 --- a/src/files-regular/refs-pull-stream.js +++ b/src/files-regular/refs-pull-stream.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => { - return (path, params, cb) => { - const stream = ipfs.refsPullStream(path, params) - pull(stream, pull.collect(cb)) - } + const ipfsRefs = (ipfs) => (path, params) => { + const stream = ipfs.refsPullStream(path, params) + + return pullToPromise.any(stream) } require('./refs-tests')(createCommon, '.refsPullStream', ipfsRefs, options) } diff --git a/src/files-regular/refs-readable-stream.js b/src/files-regular/refs-readable-stream.js index 23bc4006..b49072ea 100644 --- 
a/src/files-regular/refs-readable-stream.js +++ b/src/files-regular/refs-readable-stream.js @@ -1,15 +1,12 @@ /* eslint-env mocha */ 'use strict' -const concat = require('concat-stream') +const getStream = require('get-stream') module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => { - return (path, params, cb) => { - const stream = ipfs.refsReadableStream(path, params) - stream.on('error', cb) - stream.pipe(concat((refs) => cb(null, refs))) - } + const ipfsRefs = (ipfs) => (path, params) => { + const stream = ipfs.refsReadableStream(path, params) + return getStream.array(stream) } require('./refs-tests')(createCommon, '.refsReadableStream', ipfsRefs, options) } diff --git a/src/files-regular/refs-tests.js b/src/files-regular/refs-tests.js index efb32430..60bdebc4 100644 --- a/src/files-regular/refs-tests.js +++ b/src/files-regular/refs-tests.js @@ -1,7 +1,8 @@ /* eslint-env mocha */ 'use strict' -const mapSeries = require('async/mapSeries') +const pMapSeries = require('p-map-series') +const pTimeout = require('p-timeout') const { getDescribe, getIt, expect } = require('../utils/mocha') const loadFixture = require('aegir/fixtures') const CID = require('cids') @@ -12,113 +13,73 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => { const common = createCommon() describe(suiteName, function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs, pbRootCb, dagRootCid - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - before(function (done) { - loadPbContent(ipfs, getMockObjects(), (err, cid) => { - expect(err).to.not.exist() - pbRootCb = cid - done() - }) + before(async function () { + const cid = await loadPbContent(ipfs, getMockObjects()) + pbRootCb = cid }) - before(function (done) { - loadDagContent(ipfs, getMockObjects(), (err, cid) => { - expect(err).to.not.exist() - dagRootCid = cid - done() - }) + before(async function () { + const cid = await loadDagContent(ipfs, getMockObjects()) + dagRootCid = cid }) - after((done) => common.teardown(done)) + after(() => common.teardown()) for (const [name, options] of Object.entries(getRefsTests())) { const { path, params, expected, expectError, expectTimeout } = options // eslint-disable-next-line no-loop-func - it(name, function (done) { + it(name, async function () { this.timeout(20 * 1000) - // If we're expecting a timeout, call done when it expires - let timeout + // Call out to IPFS + const p = (path ? path(pbRootCb) : pbRootCb) + if (expectTimeout) { - timeout = setTimeout(() => { - done() - done = null - }, expectTimeout) + return expect(pTimeout(ipfsRefs(ipfs)(p, params), expectTimeout)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('name') + .to.eql('TimeoutError') } - // Call out to IPFS - const p = (path ? 
path(pbRootCb) : pbRootCb) - ipfsRefs(ipfs)(p, params, (err, refs) => { - if (!done) { - // Already timed out - return - } - - if (expectError) { - // Expected an error - expect(err).to.exist() - return done() - } - - if (expectTimeout && !err) { - // Expected a timeout but there wasn't one - return expect.fail('Expected timeout error') - } - - // Check there was no error and the refs match what was expected - expect(err).to.not.exist() - expect(refs.map(r => r.ref)).to.eql(expected) - - // Clear any pending timeout - clearTimeout(timeout) - - done() - }) + if (expectError) { + return expect(ipfsRefs(ipfs)(p, params)).to.be.eventually.rejected.and.be.an.instanceOf(Error) + } + + const refs = await ipfsRefs(ipfs)(p, params) + + // Check there was no error and the refs match what was expected + expect(refs.map(r => r.ref)).to.eql(expected) }) } - it('dag refs test', function (done) { + it('dag refs test', async function () { this.timeout(20 * 1000) // Call out to IPFS - ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true }, (err, refs) => { - // Check there was no error and the refs match what was expected - expect(err).to.not.exist() - expect(refs.map(r => r.ref).sort()).to.eql([ - 'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC', - 'QmVwtsLUHurA6wUirPSdGeEW5tfBEqenXpeRaqr8XN7bNY', - 'QmXGL3ZdYV5rNLCfHe1QsFSQGekRFzgbBu1B3XGZ7DV9fd', - 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', - 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', - 'QmdBcHbK7uDQav8YrHsfKju3EKn48knxjd96KRMFs3gtS9', - 'QmeX96opBHZHLySMFoNiWS5msxjyX6rqtr3Rr1u7uxn7zJ', - 'Qmf8MwTnY7VdcnF8WcoJ3GB24NmNd1HsGzuEWCtUYDP38x', - 'bafyreiagelcmhfn33zuslkdo7fkes3dzcr2nju6meh75zm6vqklfqiojam', - 'bafyreic2f6adq5tqnbrvwiqc3jkz2cf4tz3cz2rp6plpij2qaoufgsxwmi', - 'bafyreidoqtyvflv5v4c3gd3izxvpq4flke55ayurbrnhsxh7z5wwjc6v6e', - 'bafyreifs2ub2lnq6n2quqbi3zb5homs5iqlmm77b3am252cqzxiu7phwpy' - ]) - - done() - }) + const refs = await ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true }) + // Check the refs match what was expected + expect(refs.map(r => r.ref).sort()).to.eql([ + 'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC', + 'QmVwtsLUHurA6wUirPSdGeEW5tfBEqenXpeRaqr8XN7bNY', + 'QmXGL3ZdYV5rNLCfHe1QsFSQGekRFzgbBu1B3XGZ7DV9fd', + 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', + 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', + 'QmdBcHbK7uDQav8YrHsfKju3EKn48knxjd96KRMFs3gtS9', + 'QmeX96opBHZHLySMFoNiWS5msxjyX6rqtr3Rr1u7uxn7zJ', + 'Qmf8MwTnY7VdcnF8WcoJ3GB24NmNd1HsGzuEWCtUYDP38x', + 'bafyreiagelcmhfn33zuslkdo7fkes3dzcr2nju6meh75zm6vqklfqiojam', + 'bafyreic2f6adq5tqnbrvwiqc3jkz2cf4tz3cz2rp6plpij2qaoufgsxwmi', + 'bafyreidoqtyvflv5v4c3gd3izxvpq4flke55ayurbrnhsxh7z5wwjc6v6e', + 'bafyreifs2ub2lnq6n2quqbi3zb5homs5iqlmm77b3am252cqzxiu7phwpy' + ]) }) }) } @@ -340,43 +301,38 @@ function getRefsTests () { } } -function loadPbContent (ipfs, node, callback) { +function loadPbContent (ipfs, node) { const store = { - putData: (data, cb) => ipfs.object.put({ Data: data, Links: [] }, cb), - putLinks: (links, cb) => { + putData: (data) => ipfs.object.put({ Data: data, Links: [] }), + putLinks: (links) => ipfs.object.put({ Data: '', Links: links.map(({ name, cid }) => ({ Name: name, Hash: cid, Size: 8 })) - }, cb) - } + }) } - loadContent(ipfs, store, node, callback) + return loadContent(ipfs, store, node) } -function loadDagContent (ipfs, node, callback) { +function loadDagContent (ipfs, node) { const store = { - putData: (data, cb) => { - ipfs.add(data, (err, res) => { - if (err) { - return cb(err) - } - return cb(null, res[0].hash) - }) + 
putData: async (data) => { + const res = await ipfs.add(data) + return res[0].hash }, - putLinks: (links, cb) => { + putLinks: (links) => { const obj = {} for (const { name, cid } of links) { obj[name] = new CID(cid) } - ipfs.dag.put(obj, cb) + return ipfs.dag.put(obj) } } - loadContent(ipfs, store, node, callback) + return loadContent(ipfs, store, node) } -function loadContent (ipfs, store, node, callback) { +async function loadContent (ipfs, store, node) { if (Buffer.isBuffer(node)) { - return store.putData(node, callback) + return store.putData(node) } if (typeof node === 'object') { @@ -389,16 +345,12 @@ function loadContent (ipfs, store, node, callback) { } return 0 }) - mapSeries(sorted, ([name, child], cb) => { - loadContent(ipfs, store, child, (err, cid) => { - cb(err, { name, cid: cid && cid.toString() }) - }) - }, (err, res) => { - if (err) { - return callback(err) - } - store.putLinks(res, callback) + const res = await pMapSeries(sorted, async ([name, child]) => { + const cid = await loadContent(ipfs, store, child) + return { name, cid: cid && cid.toString() } }) + + return store.putLinks(res) } } diff --git a/src/key/export.js b/src/key/export.js index 557d323d..ddfb783b 100644 --- a/src/key/export.js +++ b/src/key/export.js @@ -9,32 +9,19 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.export', () => { + describe('.key.export', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should export "self" key', function (done) { - ipfs.key.export('self', hat(), (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.exist() - done() - }) + it('should export "self" key', async function () { + const pem = await ipfs.key.export('self', hat()) + expect(pem).to.exist() }) }) } diff --git a/src/key/gen.js b/src/key/gen.js index 355123b4..71d5be51 100644 --- a/src/key/gen.js +++ b/src/key/gen.js @@ -9,41 +9,28 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.gen', () => { + describe('.key.gen', function () { + this.timeout(60 * 1000) const keyTypes = [ { type: 'rsa', size: 2048 } ] let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) keyTypes.forEach((kt) => { - it(`should generate a new ${kt.type} key`, function (done) { + it(`should generate a new ${kt.type} key`, async function () { this.timeout(20 * 1000) const name = hat() - ipfs.key.gen(name, kt, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', name) - expect(key).to.have.property('id') - done() - }) + const key = await ipfs.key.gen(name, kt) + expect(key).to.exist() + 
expect(key).to.have.property('name', name) + expect(key).to.have.property('id') }) }) }) diff --git a/src/key/import.js b/src/key/import.js index 60065d3e..8669223c 100644 --- a/src/key/import.js +++ b/src/key/import.js @@ -9,41 +9,26 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.import', () => { + describe('.key.import', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should import an exported key', (done) => { + it('should import an exported key', async () => { const password = hat() - ipfs.key.export('self', password, (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.exist() - - ipfs.key.import('clone', pem, password, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', 'clone') - expect(key).to.have.property('id') - done() - }) - }) + const pem = await ipfs.key.export('self', password) + expect(pem).to.exist() + + const key = await ipfs.key.import('clone', pem, password) + expect(key).to.exist() + expect(key).to.have.property('name', 'clone') + expect(key).to.have.property('id') }) }) } diff --git a/src/key/list.js b/src/key/list.js index ad5b935c..38398943 100644 --- a/src/key/list.js +++ b/src/key/list.js @@ -2,7 +2,7 @@ /* eslint max-nested-callbacks: ["error", 6] */ 'use strict' -const timesSeries = require('async/timesSeries') +const pTimes = require('p-times') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,47 +11,27 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.list', () => { + describe('.key.list', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should list all the keys', function (done) { - this.timeout(60 * 1000) - - timesSeries(3, (n, cb) => { - ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, cb) - }, (err, keys) => { - expect(err).to.not.exist() + after(() => common.teardown()) - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.be.an('array') - expect(res.length).to.be.above(keys.length - 1) + it('should list all the keys', async function () { + const keys = await pTimes(3, () => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }), { concurrency: 1 }) - keys.forEach(key => { - const found = res.find(({ id, name }) => name === key.name && id === key.id) - expect(found).to.exist() - }) + const res = await ipfs.key.list() + expect(res).to.exist() + expect(res).to.be.an('array') + expect(res.length).to.be.above(keys.length - 1) - done() - }) + keys.forEach(key => { + const found = 
res.find(({ id, name }) => name === key.name && id === key.id) + expect(found).to.exist() }) }) }) diff --git a/src/key/rename.js b/src/key/rename.js index b2c89561..a1879605 100644 --- a/src/key/rename.js +++ b/src/key/rename.js @@ -10,50 +10,31 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.rename', () => { + describe('.key.rename', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should rename a key', function (done) { - this.timeout(30 * 1000) + after(() => common.teardown()) + it('should rename a key', async function () { const oldName = hat() const newName = hat() - ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }, (err, key) => { - expect(err).to.not.exist() - - ipfs.key.rename(oldName, newName, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('was', oldName) - expect(res).to.have.property('now', newName) - expect(res).to.have.property('id', key.id) - - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res.find(k => k.name === newName)).to.exist() - expect(res.find(k => k.name === oldName)).to.not.exist() - done() - }) - }) - }) + const key = await ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }) + + const renameRes = await ipfs.key.rename(oldName, newName) + expect(renameRes).to.exist() + expect(renameRes).to.have.property('was', oldName) + expect(renameRes).to.have.property('now', newName) + expect(renameRes).to.have.property('id', key.id) + + const res = await ipfs.key.list() + expect(res.find(k => k.name === newName)).to.exist() + expect(res.find(k => k.name === oldName)).to.not.exist() }) }) } diff --git a/src/key/rm.js b/src/key/rm.js index 87ce3e93..5140ed90 100644 --- a/src/key/rm.js +++ b/src/key/rm.js @@ -10,45 +10,26 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.rm', () => { + describe('.key.rm', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should rm a key', function (done) { - this.timeout(30 * 1000) + it('should rm a key', async function () { + const key = await ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }) - ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, (err, key) => { - expect(err).to.not.exist() + const removeRes = await ipfs.key.rm(key.name) + expect(removeRes).to.exist() + expect(removeRes).to.have.property('name', key.name) + expect(removeRes).to.have.property('id', key.id) - ipfs.key.rm(key.name, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('name', key.name) - expect(res).to.have.property('id', key.id) 
- - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res.find(k => k.name === key.name)).to.not.exist() - done() - }) - }) - }) + const res = await ipfs.key.list() + expect(res.find(k => k.name === key.name)).to.not.exist() }) }) } diff --git a/src/miscellaneous/dns.js b/src/miscellaneous/dns.js index a8e2fe8a..490c04db 100644 --- a/src/miscellaneous/dns.js +++ b/src/miscellaneous/dns.js @@ -13,44 +13,31 @@ module.exports = (createCommon, options) => { this.retries(3) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => { - common.teardown(done) - }) + after(() => common.teardown()) + + it('should non-recursively resolve ipfs.io', async () => { + const res = await ipfs.dns('ipfs.io', { recursive: false }) - it('should non-recursively resolve ipfs.io', () => { - return ipfs.dns('ipfs.io', { recursive: false }).then(res => { // matches pattern /ipns/ - expect(res).to.match(/\/ipns\/.+$/) - }) + expect(res).to.match(/\/ipns\/.+$/) }) - it('should recursively resolve ipfs.io', () => { - return ipfs.dns('ipfs.io', { recursive: true }).then(res => { + it('should recursively resolve ipfs.io', async () => { + const res = await ipfs.dns('ipfs.io', { recursive: true }) + // matches pattern /ipfs/ - expect(res).to.match(/\/ipfs\/.+$/) - }) + expect(res).to.match(/\/ipfs\/.+$/) }) - it('should resolve subdomain docs.ipfs.io', () => { - return ipfs.dns('docs.ipfs.io').then(res => { + it('should resolve subdomain docs.ipfs.io', async () => { + const res = await ipfs.dns('docs.ipfs.io') + // matches pattern /ipfs/ - expect(res).to.match(/\/ipfs\/.+$/) - }) + expect(res).to.match(/\/ipfs\/.+$/) }) }) } diff --git a/src/miscellaneous/id.js b/src/miscellaneous/id.js index 4718ce76..2ba82464 100644 --- a/src/miscellaneous/id.js +++ b/src/miscellaneous/id.js @@ -12,36 +12,16 @@ module.exports = (createCommon, options) => { this.timeout(60 * 1000) let ipfs - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => { - common.teardown(done) - }) - - it('should get the node ID', (done) => { - ipfs.id((err, res) => { - expect(err).to.not.exist() - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - done() - }) - }) + after(() => common.teardown()) - it('should get the node ID (promised)', () => { - return ipfs.id() - .then((res) => { - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - }) + it('should get the node ID', async () => { + const res = await ipfs.id() + expect(res).to.have.a.property('id') + expect(res).to.have.a.property('publicKey') }) }) } diff --git a/src/miscellaneous/resolve.js b/src/miscellaneous/resolve.js index b52f4f7d..699c122b 100644 --- a/src/miscellaneous/resolve.js +++ b/src/miscellaneous/resolve.js @@ -6,8 +6,6 @@ const isIpfs = require('is-ipfs') const loadFixture = require('aegir/fixtures') const hat = require('hat') const multibase = require('multibase') -const { spawnNodeWithId } = require('../utils/spawn') -const { connect } = 
require('../utils/swarm') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -18,22 +16,12 @@ module.exports = (createCommon, options) => { describe('.resolve', function () { this.timeout(60 * 1000) let ipfs - let nodeId - - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - done() - }) - }) + + before(async () => { + ipfs = await common.setup() }) - after(common.teardown) + after(() => common.teardown()) it('should resolve an IPFS hash', async () => { const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core') @@ -89,25 +77,14 @@ module.exports = (createCommon, options) => { }) it('should resolve IPNS link recursively', async function () { - this.timeout(20 * 1000) - - // Ensure another node exists for publishing to - await new Promise((resolve, reject) => { - common.setup((err, factory) => { - if (err) return reject(err) - spawnNodeWithId(factory, (err, node) => { - if (err) return reject(err) - const addr = node.peerId.addresses.find((a) => a.includes('127.0.0.1')) - connect(ipfs, addr, resolve) - }) - }) - }) + const node = await common.setup() + await ipfs.swarm.connect(node.peerId.addresses.find((a) => a.includes('127.0.0.1'))) const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true')) const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 }) await ipfs.name.publish(path, { allowOffline: true }) - await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name', resolve: false }) + await ipfs.name.publish(`/ipns/${ipfs.peerId.id}`, { allowOffline: true, key: 'key-name', resolve: false }) return expect(await ipfs.resolve(`/ipns/${keyId}`, { recursive: true })) .to.eq(`/ipfs/${path}`) diff --git a/src/miscellaneous/stop.js b/src/miscellaneous/stop.js index fcc7eb51..d83fdc9e 100644 --- a/src/miscellaneous/stop.js +++ b/src/miscellaneous/stop.js @@ -8,42 +8,18 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.stop', () => { - let ipfs + describe('.stop', function () { + this.timeout(60 * 1000) - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => { - common.teardown(done) - }) - - // must be last test to run - it('should stop the node', function (done) { + it('should stop the node', async function () { + const ipfs = await common.setup() this.timeout(10 * 1000) - ipfs.stop((err) => { - expect(err).to.not.exist() + await ipfs.stop() - // Trying to stop an already stopped node should return an error - // as the node can't respond to requests anymore - ipfs.stop((err) => { - expect(err).to.exist() - done() - }) - }) + // Trying to stop an already stopped node should return an error + // as the node can't respond to requests anymore + return expect(ipfs.stop()).to.eventually.be.rejected() }) }) } diff --git a/src/miscellaneous/version.js b/src/miscellaneous/version.js index 3c8e95e9..1d8f58eb 100644 --- a/src/miscellaneous/version.js +++ b/src/miscellaneous/version.js @@ -8,45 +8,21 @@ module.exports = (createCommon, options) 
=> { const it = getIt(options) const common = createCommon() - describe('.version', () => { + describe('.version', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => { - common.teardown(done) + before(async () => { + ipfs = await common.setup() }) - it('should get the node version', (done) => { - ipfs.version((err, result) => { - expect(err).to.not.exist() - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - done() - }) - }) + after(() => common.teardown()) - it('should get the node version (promised)', () => { - return ipfs.version() - .then((result) => { - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - }) + it('should get the node version', async () => { + const result = await ipfs.version() + expect(result).to.have.a.property('version') + expect(result).to.have.a.property('commit') + expect(result).to.have.a.property('repo') }) }) } diff --git a/src/name-pubsub/cancel.js b/src/name-pubsub/cancel.js index b46dcb82..cf2a30d0 100644 --- a/src/name-pubsub/cancel.js +++ b/src/name-pubsub/cancel.js @@ -2,10 +2,8 @@ /* eslint-env mocha */ 'use strict' -const auto = require('async/auto') const PeerId = require('peer-id') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -17,72 +15,43 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - - done() - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should return false when the name that is intended to cancel is not subscribed', function (done) { + it('should return false when the name that is intended to cancel is not subscribed', async function () { this.timeout(60 * 1000) - ipfs.name.pubsub.cancel(nodeId, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('canceled') - expect(res.canceled).to.eql(false) - - done() - }) + const res = await ipfs.name.pubsub.cancel(nodeId) + expect(res).to.exist() + expect(res).to.have.property('canceled') + expect(res.canceled).to.eql(false) }) - it('should cancel a subscription correctly returning true', function (done) { + it('should cancel a subscription correctly returning true', async function () { this.timeout(300 * 1000) - PeerId.create({ bits: 512 }, (err, peerId) => { - expect(err).to.not.exist() + const peerId = await PeerId.create({ bits: 512 }) + + const id = peerId.toB58String() + const ipnsPath = `/ipns/${id}` - const id = peerId.toB58String() - const ipnsPath = `/ipns/${id}` + const subs = await ipfs.name.pubsub.subs() + 
expect(subs).to.be.an('array').that.does.not.include(ipnsPath) - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array').that.does.not.include(ipnsPath) + await expect(ipfs.name.resolve(id)).to.be.rejected() - ipfs.name.resolve(id, (err) => { - expect(err).to.exist() - auto({ - subs1: (cb) => ipfs.name.pubsub.subs(cb), - cancel: ['subs1', (_, cb) => ipfs.name.pubsub.cancel(ipnsPath, cb)], - subs2: ['cancel', (_, cb) => ipfs.name.pubsub.subs(cb)] - }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.subs1).to.be.an('array').that.does.include(ipnsPath) - expect(res.cancel).to.have.property('canceled') - expect(res.cancel.canceled).to.eql(true) - expect(res.subs2).to.be.an('array').that.does.not.include(ipnsPath) + const subs1 = await ipfs.name.pubsub.subs() + const cancel = await ipfs.name.pubsub.cancel(ipnsPath) + const subs2 = await ipfs.name.pubsub.subs() - done() - }) - }) - }) - }) + expect(subs1).to.be.an('array').that.does.include(ipnsPath) + expect(cancel).to.have.property('canceled') + expect(cancel.canceled).to.eql(true) + expect(subs2).to.be.an('array').that.does.not.include(ipnsPath) }) }) } diff --git a/src/name-pubsub/state.js b/src/name-pubsub/state.js index c153c0d0..df40c7b2 100644 --- a/src/name-pubsub/state.js +++ b/src/name-pubsub/state.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -10,38 +9,22 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.name.pubsub.state', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get the current state of pubsub', function (done) { + it('should get the current state of pubsub', async function () { this.timeout(50 * 1000) - ipfs.name.pubsub.state((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('enabled') - expect(res.enabled).to.be.eql(true) - - done() - }) + const res = await ipfs.name.pubsub.state() + expect(res).to.exist() + expect(res).to.have.property('enabled') + expect(res.enabled).to.be.eql(true) }) }) } diff --git a/src/name-pubsub/subs.js b/src/name-pubsub/subs.js index d7a52937..388d650d 100644 --- a/src/name-pubsub/subs.js +++ b/src/name-pubsub/subs.js @@ -2,7 +2,6 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -13,56 +12,31 @@ module.exports = (createCommon, options) => { describe('.name.pubsub.subs', function () { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - done() - }) - }) + 
before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get an empty array as a result of subscriptions before any resolve', function (done) { + it('should get an empty array as a result of subscriptions before any resolve', async function () { this.timeout(60 * 1000) - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.eql([]) - - done() - }) + const res = await ipfs.name.pubsub.subs() + expect(res).to.exist() + expect(res).to.eql([]) }) - it('should get the list of subscriptions updated after a resolve', function (done) { + it('should get the list of subscriptions updated after a resolve', async function () { this.timeout(300 * 1000) const id = 'QmNP1ASen5ZREtiJTtVD3jhMKhoPb1zppET1tgpjHx2NGA' - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.eql([]) // initally empty - - ipfs.name.resolve(id, (err) => { - expect(err).to.exist() + const subs = await ipfs.name.pubsub.subs() + expect(subs).to.eql([]) // initially empty - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array').that.does.include(`/ipns/${id}`) + await expect(ipfs.name.resolve(id)).to.be.rejected() - done() - }) - }) - }) + const res = await ipfs.name.pubsub.subs() + expect(res).to.be.an('array').that.does.include(`/ipns/${id}`) }) }) } diff --git a/src/name/publish.js b/src/name/publish.js index 40e7f21e..b21315ea 100644 --- a/src/name/publish.js +++ b/src/name/publish.js @@ -4,7 +4,6 @@ const hat = require('hat') const { fixture } = require('./utils') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -17,40 +16,23 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - - ipfs.add(fixture.data, { pin: false }, done) - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id + await ipfs.add(fixture.data, { pin: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should publish an IPNS record with the default params', function (done) { + it('should publish an IPNS record with the default params', async function () { this.timeout(50 * 1000) const value = fixture.cid - ipfs.name.publish(value, { allowOffline: true }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(nodeId) - expect(res.value).to.equal(`/ipfs/${value}`) - - done() - }) + const res = await ipfs.name.publish(value, { allowOffline: true }) + expect(res).to.exist() + expect(res.name).to.equal(nodeId) + expect(res.value).to.equal(`/ipfs/${value}`) }) it('should publish correctly with the lifetime option and resolve', async () => { @@ -60,7 +42,7 @@ module.exports = (createCommon, options) => { return expect(await ipfs.name.resolve(`/ipns/${nodeId}`)).to.eq(`/ipfs/${path}`) }) - it('should publish correctly when the file was not added but resolve is disabled', function (done) { + it('should publish correctly when the file was not added but resolve is disabled', async 
function () { this.timeout(50 * 1000) const value = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' @@ -73,17 +55,13 @@ module.exports = (createCommon, options) => { allowOffline: true } - ipfs.name.publish(value, options, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(nodeId) - expect(res.value).to.equal(`/ipfs/${value}`) - - done() - }) + const res = await ipfs.name.publish(value, options) + expect(res).to.exist() + expect(res.name).to.equal(nodeId) + expect(res.value).to.equal(`/ipfs/${value}`) }) - it('should publish with a key received as param, instead of using the key of the node', function (done) { + it('should publish with a key received as param, instead of using the key of the node', async function () { this.timeout(90 * 1000) const value = fixture.cid @@ -95,18 +73,12 @@ module.exports = (createCommon, options) => { allowOffline: true } - ipfs.key.gen(keyName, { type: 'rsa', size: 2048 }, function (err, key) { - expect(err).to.not.exist() - - ipfs.name.publish(value, options, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(key.id) - expect(res.value).to.equal(`/ipfs/${value}`) + const key = await ipfs.key.gen(keyName, { type: 'rsa', size: 2048 }) - done() - }) - }) + const res = await ipfs.name.publish(value, options) + expect(res).to.exist() + expect(res.name).to.equal(key.id) + expect(res.value).to.equal(`/ipfs/${value}`) }) }) } diff --git a/src/name/resolve.js b/src/name/resolve.js index 5537e83e..1f72a0c0 100644 --- a/src/name/resolve.js +++ b/src/name/resolve.js @@ -2,9 +2,8 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') const CID = require('cids') module.exports = (createCommon, options) => { @@ -16,21 +15,12 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - done() - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should resolve a record default options', async function () { this.timeout(20 * 1000) @@ -142,20 +132,11 @@ module.exports = (createCommon, options) => { let ipfs this.retries(5) - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should resolve /ipns/ipfs.io', async () => { return expect(await ipfs.name.resolve('/ipns/ipfs.io')) diff --git a/src/object/data.js b/src/object/data.js index bfed3f39..1c31b173 100644 --- a/src/object/data.js +++ b/src/object/data.js @@ -16,55 +16,38 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + 
before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get data by multihash', (done) => { + it('should get data by multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.data(nodeCid, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) + const nodeCid = await ipfs.object.put(testObj) + + let data = await ipfs.object.data(nodeCid) + // because js-ipfs-api can't infer + // if the returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + expect(testObj.Data).to.deep.equal(data) }) - it('should get data by multihash (promised)', async () => { + it('should get data by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } const nodeCid = await ipfs.object.put(testObj) - let data = await ipfs.object.data(nodeCid) + let data = await ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' }) // because js-ipfs-api can't infer // if the returned Data is Buffer or String if (typeof data === 'string') { @@ -73,66 +56,29 @@ module.exports = (createCommon, options) => { expect(testObj.Data).to.deep.equal(data) }) - it('should get data by base58 encoded multihash', (done) => { + it('should get data by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) - }) + const nodeCid = await ipfs.object.put(testObj) - it('should get data by base58 encoded multihash string', (done) => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] + let data = await ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' }) + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) } - - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) + expect(testObj.Data).to.eql(data) }) it('returns error for request without argument', () => { - return ipfs.object.data(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.data(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - ipfs.object.data('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + 
return expect(ipfs.object.data('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/get.js b/src/object/get.js index 4bac9c93..1ca870d2 100644 --- a/src/object/get.js +++ b/src/object/get.js @@ -3,7 +3,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const UnixFs = require('ipfs-unixfs') @@ -20,76 +19,19 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get object by multihash', (done) => { + it('should get object by multihash', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1 - let node1Cid - let node2 - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1Cid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1 = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1Cid, (err, node) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node2 = node - - cb() - }) - }, - (cb) => { - expect(node1.Data).to.eql(node2.Data) - expect(node1.Links).to.eql(node2.Links) - cb() - } - ], done) - }) - - it('should get object by multihash (promised)', async () => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] - } - - const node1Cid = await ipfs.object.put(testObj) + const node1Cid = await ipfs.object.put(obj) const node1 = await ipfs.object.get(node1Cid) let node2 = await ipfs.object.get(node1Cid) @@ -99,56 +41,11 @@ module.exports = (createCommon, options) => { node2 = new DAGNode(Buffer.from(node2.Data), node2.Links, node2.size) } - expect(node1.Data).to.deep.equal(node2.Data) - expect(node1.Links).to.deep.equal(node2.Links) - }) - - it('should get object by multihash string', (done) => { - const obj = { - Data: Buffer.from(hat()), - Links: [] - } - - let node1 - let node1Cid - let node2 - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1Cid = cid - - ipfs.object.get(node1Cid, (err, node) => { - expect(err).to.not.exist() - node1 = node - cb() - }) - }) - }, - (cb) => { - // get object from ipfs multihash string - ipfs.object.get(node1Cid.toBaseEncodedString(), (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node2 = node - cb() - }) - }, - (cb) => { - expect(node1.Data).to.eql(node2.Data) - expect(node1.Links).to.eql(node2.Links) - cb() - } - ], done) + expect(node1.Data).to.eql(node2.Data) + expect(node1.Links).to.eql(node2.Links) }) - it('should get object by multihash string (promised)', async () => { + it('should get object by multihash string', async () => { const obj = 
{ Data: Buffer.from(hat()), Links: [] @@ -168,189 +65,86 @@ module.exports = (createCommon, options) => { expect(node1.Links).to.deep.equal(node2.Links) }) - it('should get object with links by multihash string', (done) => { - let node1a - let node1b - let node1bCid - let node1c - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - if (err) { - return cb(err) - } - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - node1bCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.get(node1bCid, (err, node) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node1c = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1c.Data) - cb() - } - ], done) + it('should get object with links by multihash string', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) + + const link = await asDAGLink(node2, 'some-link') + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + + const node1bCid = await ipfs.object.put(node1b) + let node1c = await ipfs.object.get(node1bCid) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1c.Data === 'string') { + node1c = new DAGNode(Buffer.from(node1c.Data), node1c.Links, node1c.size) + } + + expect(node1a.Data).to.eql(node1c.Data) }) - it('should get object by base58 encoded multihash', (done) => { + it('should get object by base58 encoded multihash', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1a - let node1aCid - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1aCid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1aCid, { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1b.Data) - expect(node1a.Links).to.eql(node1b.Links) - cb() - } - ], done) + const node1aCid = await ipfs.object.put(obj) + const node1a = await ipfs.object.get(node1aCid) + let node1b = await ipfs.object.get(node1aCid, { enc: 'base58' }) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1b.Data === 'string') { + node1b = new DAGNode(Buffer.from(node1b.Data), node1b.Links, node1b.size) + } + + expect(node1a.Data).to.eql(node1b.Data) + expect(node1a.Links).to.eql(node1b.Links) }) - it('should get object by base58 encoded multihash string', (done) => { + it('should get object by base58 encoded multihash string', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1a - let node1aCid - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, 
(err, cid) => { - expect(err).to.not.exist() - node1aCid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1b.Data) - expect(node1a.Links).to.eql(node1b.Links) - cb() - } - ], done) + const node1aCid = await ipfs.object.put(obj) + const node1a = await ipfs.object.get(node1aCid) + let node1b = await ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1b.Data === 'string') { + node1b = new DAGNode(Buffer.from(node1b.Data), node1b.Links, node1b.size) + } + + expect(node1a.Data).to.eql(node1b.Data) + expect(node1a.Links).to.eql(node1b.Links) }) - it('should supply unaltered data', () => { + it('should supply unaltered data', async () => { // has to be big enough to span several DAGNodes const data = crypto.randomBytes(1024 * 3000) - return ipfs.add({ + const result = await ipfs.add({ path: '', content: data }) - .then((result) => { - return ipfs.object.get(result[0].hash) - }) - .then((node) => { - const meta = UnixFs.unmarshal(node.Data) - - expect(meta.fileSize()).to.equal(data.length) - }) + + const node = await ipfs.object.get(result[0].hash) + const meta = UnixFs.unmarshal(node.Data) + + expect(meta.fileSize()).to.equal(data.length) }) it('should error for request without argument', () => { - return ipfs.object.get(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.get(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - return ipfs.object.get('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.get('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/links.js b/src/object/links.js index 2dd51d4e..10400d91 100644 --- a/src/object/links.js +++ b/src/object/links.js @@ -4,7 +4,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -20,45 +19,43 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get empty links by multihash', (done) => { + it('should get empty links by multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - 
ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) + const node = await ipfs.object.get(cid) + const links = await ipfs.object.links(cid) + + expect(node.Links).to.eql(links) + }) + + it('should get links by multihash', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + const link = await asDAGLink(node2, 'some-link') - ipfs.object.links(cid, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1bCid = await ipfs.object.put(node1b) + + const links = await ipfs.object.links(node1bCid) + expect(node1b.Links[0]).to.eql({ + Hash: links[0].Hash, + Tsize: links[0].Tsize, + Name: links[0].Name }) }) - it('should get empty links by multihash (promised)', async () => { + it('should get links by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] @@ -66,161 +63,61 @@ module.exports = (createCommon, options) => { const cid = await ipfs.object.put(testObj) const node = await ipfs.object.get(cid) - const links = await ipfs.object.links(cid) - - expect(node.Links).to.eql(links) - }) - it('should get links by multihash', (done) => { - let node1a - let node1b - let node1bCid - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - node1bCid = cid - - cb() - }) - }, - (cb) => { - ipfs.object.links(node1bCid, (err, links) => { - expect(err).to.not.exist() - expect(node1b.Links[0]).to.eql({ - Hash: links[0].Hash, - Tsize: links[0].Tsize, - Name: links[0].Name - }) - cb() - }) - } - ], done) + const links = await ipfs.object.links(cid.buffer, { enc: 'base58' }) + expect(node.Links).to.deep.equal(links) }) - it('should get links by base58 encoded multihash', (done) => { + it('should get links by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) + const node = await ipfs.object.get(cid) - ipfs.object.links(cid.buffer, { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) - }) + const links = await ipfs.object.links(cid.toBaseEncodedString(), { enc: 'base58' }) + expect(node.Links).to.deep.equal(links) }) - it('should get links by base58 encoded multihash string', (done) => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] - } + it('should get links from CBOR object', async () => { + const hashes = [] - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const res1 = await ipfs.add(Buffer.from('test data')) + hashes.push(res1[0].hash) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + const res2 = await 
ipfs.add(Buffer.from('more test data')) + hashes.push(res2[0].hash) - ipfs.object.links(cid.toBaseEncodedString(), { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) - }) - }) + const obj = { + some: 'data', + mylink: new CID(hashes[0]), + myobj: { + anotherLink: new CID(hashes[1]) + } + } + const cid = await ipfs.dag.put(obj) - it('should get links from CBOR object', (done) => { - const hashes = [] - ipfs.add(Buffer.from('test data'), (err, res1) => { - expect(err).to.not.exist() - hashes.push(res1[0].hash) - ipfs.add(Buffer.from('more test data'), (err, res2) => { - hashes.push(res2[0].hash) - expect(err).to.not.exist() - const obj = { - some: 'data', - mylink: new CID(hashes[0]), - myobj: { - anotherLink: new CID(hashes[1]) - } - } - ipfs.dag.put(obj, (err, cid) => { - expect(err).to.not.exist() - ipfs.object.links(cid, (err, links) => { - expect(err).to.not.exist() - expect(links.length).to.eql(2) - - // TODO: js-ipfs succeeds but go returns empty strings for link name - // const names = [links[0].name, links[1].name] - // expect(names).includes('mylink') - // expect(names).includes('myobj/anotherLink') - - const cids = [links[0].Hash.toString(), links[1].Hash.toString()] - expect(cids).includes(hashes[0]) - expect(cids).includes(hashes[1]) - - done() - }) - }) - }) - }) + const links = await ipfs.object.links(cid) + expect(links.length).to.eql(2) + + // TODO: js-ipfs succeeds but go returns empty strings for link name + // const names = [links[0].name, links[1].name] + // expect(names).includes('mylink') + // expect(names).includes('myobj/anotherLink') + + const cids = [links[0].Hash.toString(), links[1].Hash.toString()] + expect(cids).includes(hashes[0]) + expect(cids).includes(hashes[1]) }) it('returns error for request without argument', () => { - return ipfs.object.links(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.links(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - ipfs.object.links('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.links('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/new.js b/src/object/new.js index bcd78c14..b4732932 100644 --- a/src/object/new.js +++ b/src/object/new.js @@ -13,45 +13,18 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should create a new object with no template', (done) => { - ipfs.object.new((err, cid) => { - expect(err).to.not.exist() - expect(cid.toBaseEncodedString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - done() - }) - }) - - it('should create a new object with no template (promised)', async () => { + it('should create a new object with no template', async () => { 
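+ // the empty DAG-PB node always serializes to the same bytes, so its CID is deterministic and safe to hard-code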
const cid = await ipfs.object.new() expect(cid.toBaseEncodedString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') }) - it('should create a new object with unixfs-dir template', (done) => { - ipfs.object.new('unixfs-dir', (err, cid) => { - expect(err).to.not.exist() - expect(cid.toBaseEncodedString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') - done() - }) - }) - - it('should create a new object with unixfs-dir template (promised)', async () => { + it('should create a new object with unixfs-dir template', async () => { const cid = await ipfs.object.new('unixfs-dir') expect(cid.toBaseEncodedString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') }) diff --git a/src/object/patch/add-link.js b/src/object/patch/add-link.js index bf571c21..c221f051 100644 --- a/src/object/patch/add-link.js +++ b/src/object/patch/add-link.js @@ -3,14 +3,8 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const { - calculateCid, - createDAGNode, - addLinkToDAGNode, - asDAGLink -} = require('../utils') +const { asDAGLink } = require('../utils') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -22,162 +16,63 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should add a link to an existing node', (done) => { - let testNodeCid - let node1bCid - let node1a - let node1b - let node2 + after(() => common.teardown()) + it('should add a link to an existing node', async () => { const obj = { Data: Buffer.from('patch test object'), Links: [] } - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - testNodeCid = cid - cb() - }) - }, - (cb) => { - try { - node1a = new DAGNode(obj.Data, obj.Links) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('some other node')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - // note: we need to put the linked obj, otherwise IPFS won't - // timeout. Reason: it needs the node to get its size - ipfs.object.put(node2, (err, cid) => { - expect(err).to.not.exist() - - cb() - }) - }, - (cb) => { - asDAGLink(node2, 'link-to-node', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - node1bCid = cid - - cb() - }) - }, - (cb) => { - ipfs.object.patch.addLink(testNodeCid, node1b.Links[0], (err, cid) => { - expect(err).to.not.exist() - expect(node1bCid).to.eql(cid) - cb() - }) - } - /* TODO: revisit this assertions. 
- (cb) => {
- // note: make sure we can link js plain objects
- const content = Buffer.from(JSON.stringify({
- title: 'serialized object'
- }, null, 0))
- ipfs.add(content, (err, result) => {
- expect(err).to.not.exist()
- expect(result).to.exist()
- expect(result).to.have.lengthOf(1)
- const object = result.pop()
- node3 = {
- name: object.hash,
- multihash: object.hash,
- size: object.size
- }
- cb()
- })
- },
- (cb) => {
- ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3, (err, node) => {
- expect(err).to.not.exist()
- expect(node).to.exist()
- testNodeWithLinkMultihash = node.multihash
- testLinkPlainObject = node3
- cb()
- })
- }
- */
- ], done)
- })
-
- it('should add a link to an existing node (promised)', async () => {
- const obj = {
- Data: Buffer.from('patch test object (promised)'),
- Links: []
+ // link to add
+ const node2 = new DAGNode(Buffer.from('some other node'))
+ // note: we need to put the linked obj first, otherwise the call will
+ // time out: IPFS needs the node in order to calculate the link size
+ await ipfs.object.put(node2)
+ const link = await asDAGLink(node2, 'link-to-node')
+
+ // manually create the DAG, step by step
+ const node1a = new DAGNode(obj.Data, obj.Links)
+ const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link))
+ const node1bCid = await ipfs.object.put(node1b)
+
+ // add the link with patch.addLink
+ const testNodeCid = await ipfs.object.put(obj)
+ const cid = await ipfs.object.patch.addLink(testNodeCid, link)
+
+ // assert both CIDs are equal
+ expect(node1bCid).to.eql(cid)
+
+ /* TODO: revisit these assertions.
+ // note: make sure we can link js plain objects
+ const content = Buffer.from(JSON.stringify({
+ title: 'serialized object'
+ }, null, 0))
+ const result = await ipfs.add(content)
+ expect(result).to.exist()
+ expect(result).to.have.lengthOf(1)
+ const object = result.pop()
+ const node3 = {
+ name: object.hash,
+ multihash: object.hash,
+ size: object.size
}
-
- const parentCid = await ipfs.object.put(obj)
- const parent = await ipfs.object.get(parentCid)
- const childCid = await ipfs.object.put(await createDAGNode(Buffer.from('some other node'), []))
- const child = await ipfs.object.get(childCid)
- const newParent = await addLinkToDAGNode(parent, {
- name: 'link-to-node',
- size: child.size,
- cid: childCid
- })
- const newParentCid = await calculateCid(newParent)
- const nodeFromObjectPatchCid = await ipfs.object.patch.addLink(parentCid, newParent.Links[0])
-
- expect(newParentCid).to.eql(nodeFromObjectPatchCid)
+ const node = await ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3)
+ expect(node).to.exist()
+ testNodeWithLinkMultihash = node.multihash
+ testLinkPlainObject = node3
+ */
})

it('returns error for request without arguments', () => {
- return ipfs.object.patch.addLink(null, null, null)
- .then(
- () => expect.fail('should have returned an error for invalid argument'),
- (err) => expect(err).to.be.an.instanceof(Error)
- )
+ return expect(ipfs.object.patch.addLink(null, null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error)
})

it('returns error for request with only one invalid argument', () => {
- return ipfs.object.patch.addLink('invalid', null, null)
- .then(
- () => expect.fail('should have returned an error for invalid argument'),
- (err) => expect(err).to.be.an.instanceof(Error)
- )
+ return expect(ipfs.object.patch.addLink('invalid', null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error)
})
})
}
diff --git a/src/object/patch/append-data.js b/src/object/patch/append-data.js
index 9d7b56a8..3a35a958 
100644 --- a/src/object/patch/append-data.js +++ b/src/object/patch/append-data.js @@ -14,68 +14,31 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should append data to an existing node', (done) => { + it('should append data to an existing node', async () => { const obj = { Data: Buffer.from('patch test object'), Links: [] } - ipfs.object.put(obj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.patch.appendData(nodeCid, Buffer.from('append'), (err, patchedNodeCid) => { - expect(err).to.not.exist() - expect(patchedNodeCid).to.not.deep.equal(nodeCid) - done() - }) - }) - }) - - it('should append data to an existing node (promised)', async () => { - const obj = { - Data: Buffer.from('patch test object (promised)'), - Links: [] - } - const nodeCid = await ipfs.object.put(obj) const patchedNodeCid = await ipfs.object.patch.appendData(nodeCid, Buffer.from('append')) - - expect(nodeCid).to.not.deep.equal(patchedNodeCid) + expect(patchedNodeCid).to.not.deep.equal(nodeCid) }) it('returns error for request without key & data', () => { - return ipfs.object.patch.appendData(null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.appendData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' - return ipfs.object.patch.appendData(null, filePath) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.appendData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/patch/rm-link.js b/src/object/patch/rm-link.js index 03195bf7..33122683 100644 --- a/src/object/patch/rm-link.js +++ b/src/object/patch/rm-link.js @@ -1,9 +1,6 @@ /* eslint-env mocha */ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGLink = dagPB.DAGLink -const series = require('async/series') const { getDescribe, getIt, expect } = require('../../utils/mocha') const { asDAGLink } = require('../utils') @@ -17,90 +14,13 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should remove a link from an existing node', (done) => { - let node1aCid - let node1bCid - let node2 - let node2Cid - let testLink + after(() => common.teardown()) - const obj1 = { - Data: Buffer.from('patch test object 1'), - Links: [] - } - - const obj2 = { - Data: Buffer.from('patch test object 2'), - Links: [] - } - - series([ - (cb) => { - 
ipfs.object.put(obj1, (err, cid) => {
- expect(err).to.not.exist()
- node1aCid = cid
- cb()
- })
- },
- (cb) => {
- ipfs.object.put(obj2, (err, cid) => {
- expect(err).to.not.exist()
- node2Cid = cid
-
- ipfs.object.get(cid, (err, node) => {
- expect(err).to.not.exist()
- node2 = node
- cb()
- })
- })
- },
- (cb) => {
- testLink = new DAGLink('link-to-node', node2.size, node2Cid)
-
- ipfs.object.patch.addLink(node1aCid, testLink, (err, cid) => {
- expect(err).to.not.exist()
- node1bCid = cid
- cb()
- })
- },
- (cb) => {
- ipfs.object.patch.rmLink(node1bCid, testLink, (err, cid) => {
- expect(err).to.not.exist()
- expect(cid).to.not.deep.equal(node1bCid)
- expect(cid).to.deep.equal(node1aCid)
- cb()
- })
- }
- /* TODO: revisit this assertions.
- (cb) => {
- ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject, (err, node) => {
- expect(err).to.not.exist()
- expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash)
- cb()
- })
- }
- */
- ], done)
- })
-
- it('should remove a link from an existing node (promised)', async () => {
+ it('should remove a link from an existing node', async () => {
const obj1 = {
Data: Buffer.from('patch test object 1'),
Links: []
@@ -120,33 +40,29 @@ module.exports = (createCommon, options) => {
expect(withoutChildCid).to.not.deep.equal(parentCid)
expect(withoutChildCid).to.deep.equal(nodeCid)
+
+ /* TODO: revisit these assertions.
+ const node = await ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject)
+ expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash)
+ */
})

it('returns error for request without arguments', () => {
- return ipfs.object.patch.rmLink(null, null)
- .then(
- () => expect.fail('should have returned an error for invalid argument'),
- (err) => expect(err).to.be.an.instanceof(Error)
- )
+ return expect(ipfs.object.patch.rmLink(null, null)).to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
})

it('returns error for request with only one invalid argument', () => {
- return ipfs.object.patch.rmLink('invalid', null)
- .then(
- () => expect.fail('should have returned an error for invalid argument'),
- (err) => expect(err).to.be.an.instanceof(Error)
- )
+ return expect(ipfs.object.patch.rmLink('invalid', null)).to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
})

it('returns error for request with invalid first argument', () => {
const root = ''
const link = 'foo'
- return ipfs.object.patch.rmLink(root, link)
- .then(
- () => expect.fail('should have returned an error for invalid argument'),
- (err) => expect(err).to.be.an.instanceof(Error)
- )
+ return expect(ipfs.object.patch.rmLink(root, link)).to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
})
})
}
diff --git a/src/object/patch/set-data.js b/src/object/patch/set-data.js
index 7a7c619b..b65afab9 100644
--- a/src/object/patch/set-data.js
+++ b/src/object/patch/set-data.js
@@ -14,53 +14,19 @@ module.exports = (createCommon, options) => {
let ipfs

- before(function (done) {
- // CI takes longer to instantiate the daemon, so we need to increase the
- // timeout for the before step
- this.timeout(60 * 1000)
-
- common.setup((err, factory) => {
- expect(err).to.not.exist()
- factory.spawnNode((err, node) => {
- expect(err).to.not.exist()
- ipfs = node
- done()
- })
- })
+ before(async () => {
+ ipfs = await common.setup()
})

- after((done) => common.teardown(done))
+ after(() => common.teardown())

- it('should set data for an existing node', (done) => {
+ it('should set data for an existing node', async () => {
const obj = { 
Data: Buffer.from('patch test object'), Links: [] } const patchData = Buffer.from('set') - ipfs.object.put(obj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.patch.setData(nodeCid, patchData, (err, patchedNodeCid) => { - expect(err).to.not.exist() - expect(nodeCid).to.not.deep.equal(patchedNodeCid) - - ipfs.object.get(patchedNodeCid, (err, patchedNode) => { - expect(err).to.not.exist() - expect(patchedNode.Data).to.eql(patchData) - done() - }) - }) - }) - }) - - it('should set data for an existing node (promised)', async () => { - const obj = { - Data: Buffer.from('patch test object (promised)'), - Links: [] - } - const patchData = Buffer.from('set') - const nodeCid = await ipfs.object.put(obj) const patchedNodeCid = await ipfs.object.patch.setData(nodeCid, patchData) const patchedNode = await ipfs.object.get(patchedNodeCid) @@ -70,21 +36,13 @@ module.exports = (createCommon, options) => { }) it('returns error for request without key & data', () => { - return ipfs.object.patch.setData(null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.setData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' - return ipfs.object.patch.setData(null, filePath) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.setData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/put.js b/src/object/put.js index f838a878..da9e885e 100644 --- a/src/object/put.js +++ b/src/object/put.js @@ -3,7 +3,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -18,43 +17,13 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should put an object', (done) => { - const obj = { - Data: Buffer.from(hat()), - Links: [] - } - - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(obj.Data) - expect(nodeJSON.links).to.eql(obj.Links) - done() - }) - }) - }) - - it('should put an object (promised)', async () => { + it('should put an object', async () => { const obj = { Data: Buffer.from(hat()), Links: [] @@ -68,7 +37,7 @@ module.exports = (createCommon, options) => { expect(obj.Links).to.deep.equal(nodeJSON.links) }) - it('should put a JSON encoded Buffer', (done) => { + it('should put a JSON encoded Buffer', async () => { const obj = { Data: Buffer.from(hat()), Links: [] @@ -81,138 +50,58 @@ module.exports = (createCommon, options) => { const buf = Buffer.from(JSON.stringify(obj2)) - 
ipfs.object.put(buf, { enc: 'json' }, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(buf, { enc: 'json' }) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(node.Data) - done() - }) - }) + const node = await ipfs.object.get(cid) + const nodeJSON = node.toJSON() + expect(nodeJSON.data).to.eql(node.Data) }) - it('should put a Protobuf encoded Buffer', (done) => { - let node - let serialized - - series([ - (cb) => { - try { - node = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - serialized = node.serialize() - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - ipfs.object.put(serialized, { enc: 'protobuf' }, (err, cid) => { - expect(err).to.not.exist() - ipfs.object.get(cid, (err, node2) => { - expect(err).to.not.exist() - expect(node2.Data).to.deep.equal(node.Data) - expect(node2.Links).to.deep.equal(node.Links) - cb() - }) - }) - } - ], done) + it('should put a Protobuf encoded Buffer', async () => { + const node = new DAGNode(Buffer.from(hat())) + const serialized = node.serialize() + + const cid = await ipfs.object.put(serialized, { enc: 'protobuf' }) + const node2 = await ipfs.object.get(cid) + expect(node2.Data).to.deep.equal(node.Data) + expect(node2.Links).to.deep.equal(node.Links) }) - it('should put a Buffer as data', (done) => { + it('should put a Buffer as data', async () => { const data = Buffer.from(hat()) - ipfs.object.put(data, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(data).to.deep.equal(nodeJSON.data) - expect([]).to.deep.equal(nodeJSON.links) - done() - }) - }) + + const cid = await ipfs.object.put(data) + const node = await ipfs.object.get(cid) + const nodeJSON = node.toJSON() + expect(data).to.deep.equal(nodeJSON.data) + expect([]).to.deep.equal(nodeJSON.links) }) - it('should put a Protobuf DAGNode', (done) => { + it('should put a Protobuf DAGNode', async () => { const dNode = new DAGNode(Buffer.from(hat())) - ipfs.object.put(dNode, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - expect(dNode.Data).to.deep.equal(node.Data) - expect(dNode.Links).to.deep.equal(node.Links) - done() - }) - }) + const cid = await ipfs.object.put(dNode) + const node = await ipfs.object.get(cid) + expect(dNode.Data).to.deep.equal(node.Data) + expect(dNode.Links).to.deep.equal(node.Links) }) - it('should fail if a string is passed', (done) => { - ipfs.object.put(hat(), (err) => { - expect(err).to.exist() - done() - }) + it('should fail if a string is passed', () => { + return expect(ipfs.object.put(hat())).to.eventually.be.rejected() }) - it('should put a Protobuf DAGNode with a link', (done) => { - let node1a - let node1b - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - 
expect(node1b.Data).to.deep.equal(node.Data) - expect(node1b.Links).to.deep.equal(node.Links) - cb() - }) - }) - } - ], done) + it('should put a Protobuf DAGNode with a link', async () => { + const node1a = new DAGNode(Buffer.from(hat())) + const node2 = new DAGNode(Buffer.from(hat())) + + const link = await asDAGLink(node2, 'some-link') + + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + + const cid = await ipfs.object.put(node1b) + const node = await ipfs.object.get(cid) + expect(node1b.Data).to.deep.equal(node.Data) + expect(node1b.Links).to.deep.equal(node.Links) }) }) } diff --git a/src/object/stat.js b/src/object/stat.js index 3c1da06c..edac169c 100644 --- a/src/object/stat.js +++ b/src/object/stat.js @@ -4,7 +4,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -18,57 +17,20 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get stats by multihash', (done) => { + it('should get stats by multihash', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.stat(cid, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) - }) - - it('should get stats for object by multihash (promised)', async () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - await ipfs.object.put(testObj) - const stats = await ipfs.object.stat('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ') - + const cid = await ipfs.object.put(testObj) + const stats = await ipfs.object.stat(cid) const expected = { Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', NumLinks: 0, @@ -77,158 +39,96 @@ module.exports = (createCommon, options) => { DataSize: 15, CumulativeSize: 17 } - expect(expected).to.deep.equal(stats) }) - it('should respect timeout option', (done) => { + it('should respect timeout option', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err) => { - expect(err).to.not.exist() - const timeout = 2 - const startTime = new Date() - const badCid = 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ' - - // we can test that we are passing in opts by testing the timeout option for a CID that doesn't exist - ipfs.object.stat(badCid, { timeout: `${timeout}s` }, (err, stats) => { - const timeForRequest = (new Date() - startTime) / 1000 - expect(err).to.exist() - expect(err.message).to.equal('failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded') - expect(stats).to.not.exist() - expect(timeForRequest).to.not.lessThan(timeout) - expect(timeForRequest).to.not.greaterThan(timeout + 1) - done() - }) - }) + 
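// we can test that we are passing in opts by testing the timeout option for a CID that doesn't exist
+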
await ipfs.object.put(testObj) + + const timeout = 2 + const startTime = new Date() + const badCid = 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ' + + const err = await expect(ipfs.object.stat(badCid, { timeout: `${timeout}s` })).to.be.rejected() + const timeForRequest = (new Date() - startTime) / 1000 + + expect(err).to.have.property('message', 'failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded') + expect(timeForRequest).to.not.lessThan(timeout) + expect(timeForRequest).to.not.greaterThan(timeout + 1) }) - it('should get stats for object with links by multihash', (done) => { - let node1a - let node1b - let node1bCid - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - node1bCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.stat(node1bCid, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', - NumLinks: 1, - BlockSize: 64, - LinksSize: 53, - DataSize: 11, - CumulativeSize: 77 - } - expect(expected).to.eql(stats) - cb() - }) - } - ], done) + it('should get stats for object with links by multihash', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) + + const link = await asDAGLink(node2, 'some-link') + + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1bCid = await ipfs.object.put(node1b) + + const stats = await ipfs.object.stat(node1bCid) + const expected = { + Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', + NumLinks: 1, + BlockSize: 64, + LinksSize: 53, + DataSize: 11, + CumulativeSize: 77 + } + expect(expected).to.eql(stats) }) - it('should get stats by base58 encoded multihash', (done) => { + it('should get stats by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.stat(cid.buffer, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) + const cid = await ipfs.object.put(testObj) + + const stats = await ipfs.object.stat(cid.buffer) + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) }) - it('should get stats by base58 encoded multihash string', (done) => { + it('should get stats by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.stat(cid.toBaseEncodedString(), (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 
17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) + const cid = await ipfs.object.put(testObj) + + const stats = await ipfs.object.stat(cid.toBaseEncodedString()) + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) }) it('returns error for request without argument', () => { - return ipfs.object.stat(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.stat(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - return ipfs.object.stat('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.stat('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/utils.js b/src/object/utils.js index db206d98..f426dfd9 100644 --- a/src/object/utils.js +++ b/src/object/utils.js @@ -1,15 +1,10 @@ 'use strict' const { promisify } = require('es6-promisify') -const callbackify = require('callbackify') const dagPB = require('ipld-dag-pb') const { DAGNode, DAGLink } = dagPB -const calculateCid = callbackify((node) => { - return dagPB.util.cid(node.serialize(), { - cidVersion: 0 - }) -}) +const calculateCid = (node) => dagPB.util.cid(node.serialize(), { cidVersion: 0 }) const createDAGNode = promisify((data, links, cb) => { cb(null, new DAGNode(data, links)) @@ -19,20 +14,11 @@ const addLinkToDAGNode = promisify((parent, link, cb) => { cb(null, new DAGNode(parent.Data, parent.Links.concat(link))) }) -const asDAGLink = promisify((node, name, cb) => { - if (typeof name === 'function') { - cb = name - name = '' - } - - calculateCid(node, (err, cid) => { - if (err) { - return cb(err) - } +const asDAGLink = async (node, name = '') => { + const cid = await calculateCid(node) - cb(null, new DAGLink(name, node.size, cid)) - }) -}) + return new DAGLink(name, node.size, cid) +} module.exports = { calculateCid, diff --git a/src/pin/add.js b/src/pin/add.js index 45ff9797..379ff654 100644 --- a/src/pin/add.js +++ b/src/pin/add.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const each = require('async/each') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,50 +10,24 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.add', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - each(fixtures.files, (file, cb) => { - ipfs.add(file.data, { pin: false }, cb) - }, done) - } + before(async () => { + ipfs = await common.setup() + await Promise.all(fixtures.files.map(file => { + return ipfs.add(file.data, { pin: false }) + })) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should add a pin', (done) => { - 
ipfs.pin.add(fixtures.files[0].cid, { recursive: false }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - hash: fixtures.files[0].cid - }) - done() + it('should add a pin', async () => { + const pinset = await ipfs.pin.add(fixtures.files[0].cid, { recursive: false }) + expect(pinset).to.deep.include({ + hash: fixtures.files[0].cid }) }) - - it('should add a pin (promised)', () => { - return ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.include({ - hash: fixtures.files[1].cid - }) - }) - }) }) } diff --git a/src/pin/ls.js b/src/pin/ls.js index 691350e1..1b93d53d 100644 --- a/src/pin/ls.js +++ b/src/pin/ls.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,215 +10,135 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.ls', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - series([ - // two files wrapped in directories, only root CID pinned recursively - cb => { - const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) - ipfs.add(dir, { pin: false, cidVersion: 0 }, cb) - }, - cb => ipfs.pin.add(fixtures.directory.cid, { recursive: true }, cb), - // a file (CID pinned recursively) - cb => ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }, cb), - cb => ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb), - // a single CID (pinned directly) - cb => ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }, cb), - cb => ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) - ], done) - } + before(async () => { + ipfs = await common.setup() + // two files wrapped in directories, only root CID pinned recursively + const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) + await ipfs.add(dir, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.directory.cid, { recursive: true }) + // a file (CID pinned recursively) + await ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.files[0].cid, { recursive: true }) + // a single CID (pinned directly) + await ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) // 1st, because ipfs.add pins automatically - it('should list all recursive pins', (done) => { - ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - done() + it('should list all recursive pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'recursive' }) + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid }) - }) - - it('should list all indirect pins', (done) => { - 
ipfs.pin.ls({ type: 'indirect' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.not.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.not.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - done() + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid }) }) - it('should list all types of pins', (done) => { - ipfs.pin.ls((err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.be.empty() - // check the three "roots" - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - done() + it('should list all indirect pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'indirect' }) + expect(pinset).to.not.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.not.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + expect(pinset).to.not.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[1].cid }) }) - it('should list all types of pins (promised)', () => { - return ipfs.pin.ls() - .then((pinset) => { - expect(pinset).to.not.be.empty() - // check our three "roots" - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - }) - }) - - it('should list all direct pins', (done) => { - ipfs.pin.ls({ type: 'direct' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.have.lengthOf(1) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - done() + it('should list all types of pins', async () => { + const pinset = await ipfs.pin.ls() + expect(pinset).to.not.be.empty() + // check the three "roots" + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid + }) + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[1].cid }) }) - it('should list pins for a specific hash', (done) => { - ipfs.pin.ls(fixtures.files[0].cid, (err, 
pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - done() + it('should list all direct pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'direct' }) + expect(pinset).to.have.lengthOf(1) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid }) }) - it('should list pins for a specific hash (promised)', () => { - return ipfs.pin.ls(fixtures.files[0].cid) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - }) + it('should list pins for a specific hash', async () => { + const pinset = await ipfs.pin.ls(fixtures.files[0].cid) + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) }) - it('should throw an error on missing direct pins for existing path', (done) => { + it('should throw an error on missing direct pins for existing path', () => { // ipfs.txt is an indirect pin, so lookup for direct one should throw an error - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' }, (err, pinset) => { - expect(err).to.exist() - expect(pinset).to.not.exist() - expect(err.message).to.be.equal(`path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`) - done() - }) + return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' })) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('message', `path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`) }) - it('should throw an error on missing link for a specific path', (done) => { - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' }, (err, pinset) => { - expect(err).to.exist() - expect(pinset).to.not.exist() - expect(err.message).to.be.equal(`no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`) - done() - }) + it('should throw an error on missing link for a specific path', () => { + return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' })) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('message', `no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`) }) - it('should list indirect pins for a specific path', (done) => { - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - type: `indirect through ${fixtures.directory.cid}`, - hash: fixtures.directory.files[1].cid - }) - done() + it('should list indirect pins for a specific path', async () => { + const pinset = await ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }) + expect(pinset).to.deep.include({ + type: `indirect through ${fixtures.directory.cid}`, + hash: fixtures.directory.files[1].cid }) }) - it('should list recursive pins for a specific hash (promised)', () => { - return ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - }) + it('should list recursive pins for a specific hash', async () => { + const pinset = await ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }) + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) }) }) } diff --git a/src/pin/rm.js b/src/pin/rm.js index ba88b66f..7b8e5dc5 100644 --- a/src/pin/rm.js +++ 
b/src/pin/rm.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,66 +10,43 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.rm', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - series([ - cb => ipfs.add(fixtures.files[0].data, { pin: false }, cb), - cb => ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb), - cb => ipfs.add(fixtures.files[1].data, { pin: false }, cb), - cb => ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) - ], done) - } + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.files[0].data, { pin: false }) + await ipfs.pin.add(fixtures.files[0].cid, { recursive: true }) + await ipfs.add(fixtures.files[1].data, { pin: false }) + await ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should remove a recursive pin', (done) => { - ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - hash: fixtures.files[0].cid - }]) - ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.deep.include({ - hash: fixtures.files[0].cid, - type: 'recursive' - }) - done() - }) + it('should remove a recursive pin', async () => { + const removedPinset = await ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }) + expect(removedPinset).to.deep.equal([{ + hash: fixtures.files[0].cid + }]) + + const pinset = await ipfs.pin.ls({ type: 'recursive' }) + expect(pinset).to.not.deep.include({ + hash: fixtures.files[0].cid, + type: 'recursive' }) }) - it('should remove a direct pin (promised)', () => { - return ipfs.pin.rm(fixtures.files[1].cid, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - hash: fixtures.files[1].cid - }]) - return ipfs.pin.ls({ type: 'direct' }) - }) - .then((pinset) => { - expect(pinset).to.not.deep.include({ - hash: fixtures.files[1].cid - }) - }) + it('should remove a direct pin', async () => { + const removedPinset = await ipfs.pin.rm(fixtures.files[1].cid, { recursive: false }) + expect(removedPinset).to.deep.equal([{ + hash: fixtures.files[1].cid + }]) + + const pinset = await ipfs.pin.ls({ type: 'direct' }) + expect(pinset).to.not.deep.include({ + hash: fixtures.files[1].cid + }) }) }) } diff --git a/src/ping/ping-pull-stream.js b/src/ping/ping-pull-stream.js index 28b8d056..e9e65add 100644 --- a/src/ping/ping-pull-stream.js +++ b/src/ping/ping-pull-stream.js @@ -1,12 +1,9 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') +const pullToPromise = require('pull-to-promise') const { getDescribe, getIt, expect } = require('../utils/mocha') const { isPong } = require('./utils.js') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = 
getDescribe(options) @@ -14,73 +11,51 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pingPullStream', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should send the specified number of packets over pull stream', (done) => { - let packetNum = 0 + it('should send the specified number of packets over pull stream', async () => { const count = 3 - pull( - ipfsA.pingPullStream(ipfsB.peerId.id, { count }), - pull.drain((res) => { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } - }, (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - }) - ) + + const results = await pullToPromise.any(ipfsA.pingPullStream(ipfsB.peerId.id, { count })) + + const packetNum = results.reduce((acc, result) => { + expect(result.success).to.be.true() + + if (isPong(result)) { + acc++ + } + + return acc + }, 0) + + expect(packetNum).to.equal(count) }) - it('should fail when pinging an unknown peer over pull stream', (done) => { + it('should fail when pinging an unknown peer over pull stream', () => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 - pull( - ipfsA.pingPullStream(unknownPeerId, { count }), - pull.collect((err, results) => { - expect(err).to.exist() - done() - }) - ) + + return expect(pullToPromise.any(ipfsA.pingPullStream(unknownPeerId, { count }))) + .to.eventually.be.rejected() }) - it('should fail when pinging an invalid peer id over pull stream', (done) => { + it('should fail when pinging an invalid peer id over pull stream', () => { const invalidPeerId = 'not a peer ID' const count = 2 - pull( - ipfsA.pingPullStream(invalidPeerId, { count }), - pull.collect((err, results) => { - expect(err).to.exist() - done() - }) - ) + + return expect(pullToPromise.any(ipfsA.pingPullStream(invalidPeerId, { count }))) + .to.eventually.be.rejected() }) }) } diff --git a/src/ping/ping-readable-stream.js b/src/ping/ping-readable-stream.js index 4d057fb6..3c69fc36 100644 --- a/src/ping/ping-readable-stream.js +++ b/src/ping/ping-readable-stream.js @@ -3,11 +3,8 @@ const pump = require('pump') const { Writable } = require('stream') -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { isPong } = require('./utils.js') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -15,88 +12,81 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pingReadableStream', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - common.setup((err, factory) => { - if 
(err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should send the specified number of packets over readable stream', (done) => { + it('should send the specified number of packets over readable stream', () => { let packetNum = 0 const count = 3 - pump( - ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), - new Writable({ - objectMode: true, - write (res, enc, cb) { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), + new Writable({ + objectMode: true, + write (res, enc, cb) { + expect(res.success).to.be.true() + // It's a pong + if (isPong(res)) { + packetNum++ + } - cb() + cb() + } + }), + (err) => { + expect(err).to.not.exist() + expect(packetNum).to.equal(count) + resolve() } - }), - (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - } - ) + ) + }) }) - it('should fail when pinging peer that is not available over readable stream', (done) => { + it('should fail when pinging peer that is not available over readable stream', () => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' - pump( - ipfsA.pingReadableStream(unknownPeerId, {}), - new Writable({ - objectMode: true, - write: (res, enc, cb) => cb() - }), - (err) => { - expect(err).to.exist() - done() - } - ) + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(unknownPeerId, {}), + new Writable({ + objectMode: true, + write: (res, enc, cb) => cb() + }), + (err) => { + expect(err).to.exist() + resolve() + } + ) + }) }) - it('should fail when pinging an invalid peer id over readable stream', (done) => { + it('should fail when pinging an invalid peer id over readable stream', () => { const invalidPeerId = 'not a peer ID' - pump( - ipfsA.pingReadableStream(invalidPeerId, {}), - new Writable({ - objectMode: true, - write: (chunk, enc, cb) => cb() - }), - (err) => { - expect(err).to.exist() - done() - } - ) + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(invalidPeerId, {}), + new Writable({ + objectMode: true, + write: (chunk, enc, cb) => cb() + }), + (err) => { + expect(err).to.exist() + resolve() + } + ) + }) }) }) } diff --git a/src/ping/ping.js b/src/ping/ping.js index 2a4f1b91..95b91482 100644 --- a/src/ping/ping.js +++ b/src/ping/ping.js @@ -1,11 +1,8 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -13,62 +10,40 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.ping', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - this.timeout(60 * 
1000) - - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should send the specified number of packets', (done) => { + it('should send the specified number of packets', async () => { const count = 3 - ipfsA.ping(ipfsB.peerId.id, { count }, (err, responses) => { - expect(err).to.not.exist() - responses.forEach(expectIsPingResponse) - const pongs = responses.filter(isPong) - expect(pongs.length).to.equal(count) - done() - }) + const responses = await ipfsA.ping(ipfsB.peerId.id, { count }) + responses.forEach(expectIsPingResponse) + + const pongs = responses.filter(isPong) + expect(pongs.length).to.equal(count) }) - it('should fail when pinging a peer that is not available', (done) => { + it('should fail when pinging a peer that is not available', () => { const notAvailablePeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 - ipfsA.ping(notAvailablePeerId, { count }, (err, responses) => { - expect(err).to.exist() - done() - }) + return expect(ipfsA.ping(notAvailablePeerId, { count })).to.eventually.be.rejected() }) - it('should fail when pinging an invalid peer Id', (done) => { + it('should fail when pinging an invalid peer Id', () => { const invalidPeerId = 'not a peer ID' const count = 2 - ipfsA.ping(invalidPeerId, { count }, (err, responses) => { - expect(err).to.exist() - done() - }) + + return expect(ipfsA.ping(invalidPeerId, { count })).to.eventually.be.rejected() }) }) } diff --git a/src/pubsub/ls.js b/src/pubsub/ls.js index 639aed5e..126be18b 100644 --- a/src/pubsub/ls.js +++ b/src/pubsub/ls.js @@ -3,7 +3,7 @@ const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,19 +16,8 @@ module.exports = (createCommon, options) => { let ipfs let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) afterEach(async () => { @@ -39,7 +28,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return an empty list when no topics are subscribed', async () => { const topics = await ipfs.pubsub.ls() diff --git a/src/pubsub/peers.js b/src/pubsub/peers.js index 44813023..01084d2d 100644 --- a/src/pubsub/peers.js +++ b/src/pubsub/peers.js @@ -1,12 +1,9 @@ /* eslint-env mocha */ 'use strict' -const parallel = require('async/parallel') -const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') -const delay = 
require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -21,24 +18,17 @@ module.exports = (createCommon, options) => { let ipfs3 let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) + before(async () => { + ipfs1 = await common.setup() + ipfs2 = await common.setup() + ipfs3 = await common.setup() - common.setup((err, factory) => { - if (err) return done(err) - - spawnNodesWithId(3, factory, (err, nodes) => { - if (err) return done(err) - - ipfs1 = nodes[0] - ipfs2 = nodes[1] - ipfs3 = nodes[2] + const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) + const ipfs3Addr = ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')) - done() - }) - }) + await ipfs1.swarm.connect(ipfs2Addr) + await ipfs1.swarm.connect(ipfs3Addr) + await ipfs2.swarm.connect(ipfs3Addr) }) afterEach(async () => { @@ -51,17 +41,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) - - before((done) => { - const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) - const ipfs3Addr = ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')) - - parallel([ - (cb) => connect(ipfs1, [ipfs2Addr, ipfs3Addr], cb), - (cb) => connect(ipfs2, ipfs3Addr, cb) - ], done) - }) + after(() => common.teardown()) it('should not error when not subscribed to a topic', async () => { const topic = getTopic() diff --git a/src/pubsub/publish.js b/src/pubsub/publish.js index 8c660c67..6eba0061 100644 --- a/src/pubsub/publish.js +++ b/src/pubsub/publish.js @@ -3,7 +3,7 @@ const hat = require('hat') const { getTopic } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -15,22 +15,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should publish message from string', () => { const topic = getTopic() diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js index e04bb0a5..be5093fc 100644 --- a/src/pubsub/subscribe.js +++ b/src/pubsub/subscribe.js @@ -4,11 +4,9 @@ const pushable = require('it-pushable') const { collect } = require('streaming-iterables') -const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -23,23 +21,9 @@ module.exports = (createCommon, options) => { let topic let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) 
- - common.setup((err, factory) => { - if (err) return done(err) - - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return done(err) - - ipfs1 = nodes[0] - ipfs2 = nodes[1] - - done() - }) - }) + before(async () => { + ipfs1 = await common.setup() + ipfs2 = await common.setup() }) beforeEach(() => { @@ -57,7 +41,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) describe('single node', () => { it('should subscribe to one topic', async () => { @@ -155,7 +139,7 @@ module.exports = (createCommon, options) => { }) describe('multiple connected nodes', () => { - before((done) => { + before(() => { if (ipfs1.pubsub.setMaxListeners) { ipfs1.pubsub.setMaxListeners(100) } @@ -165,7 +149,7 @@ module.exports = (createCommon, options) => { } const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) - connect(ipfs1, ipfs2Addr, done) + return ipfs1.swarm.connect(ipfs2Addr) }) it('should receive messages from a different node', async () => { diff --git a/src/pubsub/unsubscribe.js b/src/pubsub/unsubscribe.js index f6b8ce81..dd0be0f9 100644 --- a/src/pubsub/unsubscribe.js +++ b/src/pubsub/unsubscribe.js @@ -4,7 +4,7 @@ const { isBrowser, isWebWorker, isElectronRenderer } = require('ipfs-utils/src/env') const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,22 +16,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) // Browser/worker has max ~5 open HTTP requests to the same origin const count = isBrowser || isWebWorker || isElectronRenderer ? 
5 : 10 diff --git a/src/pubsub/utils.js b/src/pubsub/utils.js index f6721c8c..80b53c65 100644 --- a/src/pubsub/utils.js +++ b/src/pubsub/utils.js @@ -1,7 +1,7 @@ 'use strict' const hat = require('hat') -const delay = require('../utils/delay') +const delay = require('delay') async function waitForPeers (ipfs, topic, peersToWait, waitForMs) { const start = Date.now() diff --git a/src/repo/gc.js b/src/repo/gc.js index e2d0158b..11fded58 100644 --- a/src/repo/gc.js +++ b/src/repo/gc.js @@ -9,38 +9,27 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.repo.gc', () => { + describe('.repo.gc', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should run garbage collection', (done) => { - ipfs.repo.gc((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + it('should run garbage collection', async () => { + const res = await ipfs.add(Buffer.from('apples')) + + const pinset = await ipfs.pin.ls() + expect(pinset.map((obj) => obj.hash)).includes(res[0].hash) + + await ipfs.pin.rm(res[0].hash) + await ipfs.repo.gc() - it('should run garbage collection (promised)', () => { - return ipfs.repo.gc().then((res) => { - expect(res).to.exist() - }) + const finalPinset = await ipfs.pin.ls() + expect(finalPinset.map((obj) => obj.hash)).not.includes(res[0].hash) }) it('should clean up unpinned data', async () => { diff --git a/src/repo/stat.js b/src/repo/stat.js index d313a999..662c2403 100644 --- a/src/repo/stat.js +++ b/src/repo/stat.js @@ -2,44 +2,26 @@ 'use strict' const { expectIsRepo } = require('../stats/utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.repo.stat', () => { + describe('.repo.stat', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should get repo stats', (done) => { - ipfs.repo.stat((err, res) => { - expectIsRepo(err, res) - done() - }) - }) + after(() => common.teardown()) - it('should get repo stats (promised)', () => { - return ipfs.repo.stat().then((res) => { - expectIsRepo(null, res) - }) + it('should get repo stats', async () => { + const res = await ipfs.repo.stat() + expectIsRepo(null, res) }) }) } diff --git a/src/repo/version.js b/src/repo/version.js index 20c012a5..99c08432 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -8,38 +8,19 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - 
describe('.repo.version', () => { + describe('.repo.version', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should get the repo version', (done) => { - ipfs.repo.version((err, version) => { - expect(err).to.not.exist() - expect(version).to.exist() - done() - }) - }) + after(() => common.teardown()) - it('should get the repo version (promised)', () => { - return ipfs.repo.version().then((version) => { - expect(version).to.exist() - }) + it('should get the repo version', async () => { + const version = await ipfs.repo.version() + expect(version).to.exist() }) }) } diff --git a/src/stats/bitswap.js b/src/stats/bitswap.js index b0e57f38..37a41828 100644 --- a/src/stats/bitswap.js +++ b/src/stats/bitswap.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') const { expectIsBitswap } = require('./utils') module.exports = (createCommon, options) => { @@ -12,34 +12,15 @@ module.exports = (createCommon, options) => { describe('.stats.bitswap', () => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should get bitswap stats', (done) => { - ipfs.stats.bitswap((err, res) => { - expectIsBitswap(err, res) - done() - }) - }) + after(() => common.teardown()) - it('should get bitswap stats (promised)', () => { - return ipfs.stats.bitswap().then((res) => { - expectIsBitswap(null, res) - }) + it('should get bitswap stats', async () => { + const res = await ipfs.stats.bitswap() + expectIsBitswap(null, res) }) }) } diff --git a/src/stats/bw-pull-stream.js b/src/stats/bw-pull-stream.js index 000525c5..6d3c3933 100644 --- a/src/stats/bw-pull-stream.js +++ b/src/stats/bw-pull-stream.js @@ -2,44 +2,29 @@ 'use strict' const { expectIsBandwidth } = require('./utils') -const pull = require('pull-stream') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const pullToPromise = require('pull-to-promise') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.bwPullStream', () => { + describe('.stats.bwPullStream', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - 
it('should get bandwidth stats over pull stream', (done) => { + it('should get bandwidth stats over pull stream', async () => { const stream = ipfs.stats.bwPullStream() - pull( - stream, - pull.collect((err, data) => { - expectIsBandwidth(err, data[0]) - done() - }) - ) + const data = await pullToPromise.any(stream) + expectIsBandwidth(null, data[0]) }) }) } diff --git a/src/stats/bw-readable-stream.js b/src/stats/bw-readable-stream.js index e8bc490b..6ab0c711 100644 --- a/src/stats/bw-readable-stream.js +++ b/src/stats/bw-readable-stream.js @@ -2,41 +2,30 @@ 'use strict' const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.bwReadableStream', () => { + describe('.stats.bwReadableStream', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should get bandwidth stats over readable stream', (done) => { + it('should get bandwidth stats over readable stream', async () => { const stream = ipfs.stats.bwReadableStream() - stream.once('data', (data) => { - expectIsBandwidth(null, data) - stream.destroy() - done() - }) + const [data] = await getStream.array(stream) + + expectIsBandwidth(null, data) }) }) } diff --git a/src/stats/bw.js b/src/stats/bw.js index c851f4f2..13937c77 100644 --- a/src/stats/bw.js +++ b/src/stats/bw.js @@ -2,44 +2,26 @@ 'use strict' const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.bw', () => { + describe('.stats.bw', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should get bandwidth stats', function (done) { - ipfs.stats.bw((err, res) => { - expectIsBandwidth(err, res) - done() - }) - }) + after(() => common.teardown()) - it('should get bandwidth stats (promised)', () => { - return ipfs.stats.bw().then((res) => { - expectIsBandwidth(null, res) - }) + it('should get bandwidth stats', async () => { + const res = await ipfs.stats.bw() + expectIsBandwidth(null, res) }) }) } diff --git a/src/stats/repo.js b/src/stats/repo.js index 77b85e3e..79244930 100644 --- a/src/stats/repo.js +++ b/src/stats/repo.js @@ -2,44 +2,26 @@ 'use strict' const { expectIsRepo } = require('./utils') -const { getDescribe, getIt, expect
} = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.repo', () => { + describe('.stats.repo', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) - - it('should get repo stats', (done) => { - ipfs.stats.repo((err, res) => { - expectIsRepo(err, res) - done() - }) - }) + after(() => common.teardown()) - it('should get repo stats (promised)', () => { - return ipfs.stats.repo().then((res) => { - expectIsRepo(null, res) - }) + it('should get repo stats', async () => { + const res = await ipfs.stats.repo() + expectIsRepo(null, res) }) }) } diff --git a/src/swarm/addrs.js b/src/swarm/addrs.js index 59ec826d..03196577 100644 --- a/src/swarm/addrs.js +++ b/src/swarm/addrs.js @@ -23,20 +23,11 @@ module.exports = (createCommon, options) => { after(() => common.teardown()) - it('should get a list of node addresses', (done) => { - ipfsA.swarm.addrs((err, peerInfos) => { - expect(err).to.not.exist() - expect(peerInfos).to.not.be.empty() - expect(peerInfos).to.be.an('array') - peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true()) - done() - }) - }) - - it('should get a list of node addresses (promised)', () => { - return ipfsA.swarm.addrs().then((peerInfos) => { - expect(peerInfos).to.have.length.above(0) - }) + it('should get a list of node addresses', async () => { + const peerInfos = await ipfsA.swarm.addrs() + expect(peerInfos).to.not.be.empty() + expect(peerInfos).to.be.an('array') + peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true()) }) }) } diff --git a/src/swarm/connect.js b/src/swarm/connect.js index 2e0a498d..9de40265 100644 --- a/src/swarm/connect.js +++ b/src/swarm/connect.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -20,12 +20,16 @@ module.exports = (createCommon, options) => { after(() => common.teardown()) - it('should connect to a peer', (done) => { - ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done) - }) + it('should connect to a peer', async () => { + let peers + + peers = await ipfsA.swarm.peers() + expect(peers).to.have.length(0) + + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) - it('should connect to a peer (promised)', () => { - return ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + peers = await ipfsA.swarm.peers() + expect(peers).to.have.length.above(0) }) }) } diff --git a/src/swarm/disconnect.js b/src/swarm/disconnect.js index fa4edab9..51551001 100644 --- a/src/swarm/disconnect.js +++ b/src/swarm/disconnect.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { getDescribe, getIt } = require('../utils/mocha') +const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -22,12 +22,16 @@ module.exports = (createCommon, 
options) => { after(() => common.teardown()) - it('should disconnect from a peer', (done) => { - ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0], done) - }) + it('should disconnect from a peer', async () => { + let peers + + peers = await ipfsA.swarm.peers() + expect(peers).to.have.length.above(0) + + await ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0]) - it('should disconnect from a peer (promised)', () => { - return ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0]) + peers = await ipfsA.swarm.peers() + expect(peers).to.have.length(0) }) }) } diff --git a/src/swarm/local-addrs.js b/src/swarm/local-addrs.js index e9506c00..0c534e6e 100644 --- a/src/swarm/local-addrs.js +++ b/src/swarm/local-addrs.js @@ -19,18 +19,9 @@ module.exports = (createCommon, options) => { after(() => common.teardown()) - it('should list local addresses the node is listening on', (done) => { - ipfs.swarm.localAddrs((err, multiaddrs) => { - expect(err).to.not.exist() - expect(multiaddrs).to.have.length.above(0) - done() - }) - }) - - it('should list local addresses the node is listening on (promised)', () => { - return ipfs.swarm.localAddrs().then((multiaddrs) => { - expect(multiaddrs).to.have.length.above(0) - }) + it('should list local addresses the node is listening on', async () => { + const multiaddrs = await ipfs.swarm.localAddrs() + expect(multiaddrs).to.have.length.above(0) }) }) } diff --git a/src/swarm/peers.js b/src/swarm/peers.js index 576dd87f..f37471c9 100644 --- a/src/swarm/peers.js +++ b/src/swarm/peers.js @@ -21,67 +21,44 @@ module.exports = (createCommon, options) => { ipfsA = await common.setup() ipfsB = await common.setup() await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) + await delay(60 * 1000) // wait for open streams in the connection to become available }) after(() => common.teardown()) - it('should list peers this node is connected to', (done) => { - ipfsA.swarm.peers((err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length.above(0) + it('should list peers this node is connected to', async () => { + const peers = await ipfsA.swarm.peers() + expect(peers).to.have.length.above(0) - const peer = peers[0] + const peer = peers[0] - expect(peer).to.have.a.property('addr') - expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) - expect(peer).to.have.a.property('peer') - expect(PeerId.isPeerId(peer.peer)).to.equal(true) - expect(peer).to.not.have.a.property('latency') + expect(peer).to.have.a.property('addr') + expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) + expect(peer).to.have.a.property('peer') + expect(PeerId.isPeerId(peer.peer)).to.equal(true) + expect(peer).to.not.have.a.property('latency') - // only available in 0.4.5 - // expect(peer).to.have.a.property('muxer') - // expect(peer).to.not.have.a.property('streams') - - done() - }) + /* TODO: These assertions must be uncommented as soon as + https://github.com/ipfs/js-ipfs/issues/2601 gets resolved */ + // expect(peer).to.have.a.property('muxer') + // expect(peer).to.not.have.a.property('streams') }) - it('should list peers this node is connected to (promised)', () => { - return ipfsA.swarm.peers().then((peers) => { - expect(peers).to.have.length.above(0) - - const peer = peers[0] - - expect(peer).to.have.a.property('addr') - expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) - expect(peer).to.have.a.property('peer') - expect(PeerId.isPeerId(peer.peer)).to.equal(true) - expect(peer).to.not.have.a.property('latency') - - // only available in 0.4.5 - // expect(peer).to.have.a.property('muxer') - //
expect(peer).to.not.have.a.property('streams') - }) - }) - - it('should list peers this node is connected to with verbose option', (done) => { - ipfsA.swarm.peers({ verbose: true }, (err, peers) => { - expect(err).to.not.exist() - expect(peers).to.have.length.above(0) - - const peer = peers[0] - expect(peer).to.have.a.property('addr') - expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) - expect(peer).to.have.a.property('peer') - expect(peer).to.have.a.property('latency') - expect(peer.latency).to.match(/n\/a|[0-9]+m?s/) // n/a or 3ms or 3s - - // Only available in 0.4.5 - // expect(peer).to.have.a.property('muxer') - // expect(peer).to.have.a.property('streams') - - done() - }) + it('should list peers this node is connected to with verbose option', async () => { + const peers = await ipfsA.swarm.peers({ verbose: true }) + expect(peers).to.have.length.above(0) + + const peer = peers[0] + expect(peer).to.have.a.property('addr') + expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true) + expect(peer).to.have.a.property('peer') + expect(peer).to.have.a.property('latency') + expect(peer.latency).to.match(/n\/a|[0-9]+[mµ]?s/) // n/a or 3ms or 3µs or 3s + + /* TODO: These assertions must be uncommented as soon as + https://github.com/ipfs/js-ipfs/issues/2601 gets resolved */ + // expect(peer).to.have.a.property('muxer') + // expect(peer).to.have.a.property('streams') }) function getConfig (addrs) { @@ -105,8 +82,8 @@ module.exports = (createCommon, options) => { it('should list peers only once', async () => { const config = getConfig(['/ip4/127.0.0.1/tcp/0']) - const nodeA = await common.setup({}, { config }) - const nodeB = await common.setup({}, { config }) + const nodeA = await common.setup({ spawnOptions: { config } }) + const nodeB = await common.setup({ spawnOptions: { config } }) await nodeA.swarm.connect(nodeB.peerId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() @@ -125,8 +102,8 @@ module.exports = (createCommon, options) => { '/ip4/127.0.0.1/tcp/26545', '/ip4/127.0.0.1/tcp/26546' ]) - const nodeA = await common.setup({}, { configA }) - const nodeB = await common.setup({}, { configB }) + const nodeA = await common.setup({ spawnOptions: { config: configA } }) + const nodeB = await common.setup({ spawnOptions: { config: configB } }) await nodeA.swarm.connect(nodeB.peerId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() diff --git a/src/utils/delay.js b/src/utils/delay.js deleted file mode 100644 index 0295cb6c..00000000 --- a/src/utils/delay.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -/** - * Promise version of setTimeout - * @example - * ```js - * async function something() { - * console.log("this might take some time...."); - * await delay(5000); - * console.log("done!") - * } - * - * something(); - * ``` - * @param {number} ms - * @return {Promise} - */ -const delay = ms => new Promise(resolve => setTimeout(resolve, ms)) - -module.exports = delay diff --git a/src/utils/spawn.js b/src/utils/spawn.js deleted file mode 100644 index 792d482b..00000000 --- a/src/utils/spawn.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict' - -const waterfall = require('async/waterfall') -const timesSeries = require('async/timesSeries') -const map = require('async/map') - -function identify (node, cb) { - node.id((err, id) => { - if (err) return cb(err) - node.peerId = id - cb(null, node) - }) -} - -// Spawn a node, get it's id and set it as `peerId` on the node -function spawnNodeWithId (factory, callback) { - waterfall([(cb) => 
factory.spawnNode(cb), identify], callback) -} - -exports.spawnNodeWithId = spawnNodeWithId - -// Spawn n nodes -function spawnNodes (n, factory, callback) { - timesSeries(n, (_, cb) => factory.spawnNode(cb), callback) -} - -exports.spawnNodes = spawnNodes - -// Spawn n nodes, getting their id's and setting them as `peerId` on the nodes -function spawnNodesWithId (n, factory, callback) { - spawnNodes(n, factory, (err, nodes) => { - if (err) return callback(err) - map(nodes, identify, callback) - }) -} - -exports.spawnNodesWithId = spawnNodesWithId diff --git a/src/utils/swarm.js b/src/utils/swarm.js deleted file mode 100644 index b36dc7df..00000000 --- a/src/utils/swarm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const eachSeries = require('async/eachSeries') - -function connect (fromNode, toAddrs, cb) { - if (!Array.isArray(toAddrs)) { - toAddrs = [toAddrs] - } - - // FIXME ??? quick connections to different nodes sometimes cause no - // connection and no error, hence serialize connections and pause between - eachSeries(toAddrs, (toAddr, cb) => { - fromNode.swarm.connect(toAddr, (err) => { - if (err) return cb(err) - setTimeout(cb, 300) - }) - }, cb) -} - -module.exports.connect = connect