From d5c07588405c630f7d5a9617238232b52c84c22a Mon Sep 17 00:00:00 2001
From: Dirk McCormick
Date: Sun, 17 Mar 2019 20:23:47 -0400
Subject: [PATCH] chore: callbacks -> async / await

BREAKING CHANGE: All places in the API that used callbacks are now
replaced with async/await

---
 examples/full-s3-repo/index.js     |  49 ++---
 examples/full-s3-repo/package.json |   1 -
 examples/full-s3-repo/s3-lock.js   |  80 ++++----
 package.json                       |  13 +-
 src/index.js                       | 314 ++++++++++-------------------
 src/s3-repo.js                     |  18 +-
 test/index.spec.js                 | 117 ++++++-----
 test/utils/s3-mock.js              |  40 ++--
 8 files changed, 264 insertions(+), 368 deletions(-)
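For consumers, the breaking change is mechanical: every method that previously
took a trailing callback now returns a promise. A minimal before/after sketch
(illustrative only: it assumes an `S3Store` instance named `store` and runs
inside an `async` function):

    const { Key } = require('interface-datastore')

    // before: callback style
    store.put(new Key('/z/key'), Buffer.from('data'), (err) => {
      if (err) throw err
      store.get(new Key('/z/key'), (err, value) => {
        if (err) throw err
        console.log(value)
      })
    })

    // after: async/await style
    await store.put(new Key('/z/key'), Buffer.from('data'))
    const value = await store.get(new Key('/z/key'))
    console.log(value)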
diff --git a/examples/full-s3-repo/index.js b/examples/full-s3-repo/index.js
index 7168ed3..1dfac22 100644
--- a/examples/full-s3-repo/index.js
+++ b/examples/full-s3-repo/index.js
@@ -20,36 +20,31 @@ let node = new IPFS({
 console.log('Start the node')
 
 // Test out the repo by sending and fetching some data
-node.on('ready', () => {
+node.on('ready', async () => {
   console.log('Ready')
-  node.version()
-    .then((version) => {
-      console.log('Version:', version.version)
-    })
+
+  try {
+    const version = await node.version()
+    console.log('Version:', version.version)
+
     // Once we have the version, let's add a file to IPFS
-    .then(() => {
-      return node.add({
-        path: 'data.txt',
-        content: Buffer.from(require('crypto').randomBytes(1024 * 25))
-      })
-    })
+    const filesAdded = await node.add({
+      path: 'data.txt',
+      content: Buffer.from(require('crypto').randomBytes(1024 * 25))
+    })
+    console.log('\nAdded file:', filesAdded[0].path, filesAdded[0].hash)
+
     // Log out the added files metadata and cat the file from IPFS
-    .then((filesAdded) => {
-      console.log('\nAdded file:', filesAdded[0].path, filesAdded[0].hash)
-      return node.cat(filesAdded[0].hash)
-    })
+    const data = await node.cat(filesAdded[0].hash)
+
     // Print out the files contents to console
-    .then((data) => {
-      console.log(`\nFetched file content containing ${data.byteLength} bytes`)
-    })
-    // Log out the error, if there is one
-    .catch((err) => {
-      console.log('File Processing Error:', err)
-    })
-    // After everything is done, shut the node down
-    // We don't need to worry about catching errors here
-    .then(() => {
-      console.log('\n\nStopping the node')
-      return node.stop()
-    })
+    console.log(`\nFetched file content containing ${data.byteLength} bytes`)
+  } catch (err) {
+    // Log out the error
+    console.log('File Processing Error:', err)
+  }
+
+  // After everything is done, shut the node down
+  // We don't need to worry about catching errors here
+  console.log('\n\nStopping the node')
+  return node.stop()
 })
diff --git a/examples/full-s3-repo/package.json b/examples/full-s3-repo/package.json
index 204d0bf..d388545 100644
--- a/examples/full-s3-repo/package.json
+++ b/examples/full-s3-repo/package.json
@@ -10,7 +10,6 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "async": "^2.6.2",
     "aws-sdk": "^2.402.0",
    "datastore-s3": "../../",
    "ipfs": "~0.34.4",
diff --git a/examples/full-s3-repo/s3-lock.js b/examples/full-s3-repo/s3-lock.js
index 69db28c..3d117b7 100644
--- a/examples/full-s3-repo/s3-lock.js
+++ b/examples/full-s3-repo/s3-lock.js
@@ -27,26 +27,24 @@ class S3Lock {
    * Creates the lock. This can be overriden to customize where the lock should be created
    *
    * @param {string} dir
-   * @param {function(Error, LockCloser)} callback
-   * @returns {void}
+   * @returns {Promise<LockCloser>}
    */
-  lock (dir, callback) {
+  async lock (dir) {
     const lockPath = this.getLockfilePath(dir)
 
-    this.locked(dir, (err, alreadyLocked) => {
-      if (err || alreadyLocked) {
-        return callback(new Error('The repo is already locked'))
-      }
-
-      // There's no lock yet, create one
-      this.s3.put(lockPath, Buffer.from(''), (err, data) => {
-        if (err) {
-          return callback(err, null)
-        }
-
-        callback(null, this.getCloser(lockPath))
-      })
-    })
+    let alreadyLocked, err
+    try {
+      alreadyLocked = await this.locked(dir)
+    } catch (e) {
+      err = e
+    }
+    if (err || alreadyLocked) {
+      throw new Error('The repo is already locked')
+    }
+
+    // There's no lock yet, create one
+    await this.s3.put(lockPath, Buffer.from(''))
+    return this.getCloser(lockPath)
   }
 
   /**
@@ -61,21 +59,20 @@ class S3Lock {
      * Removes the lock. This can be overriden to customize how the lock is removed. This
      * is important for removing any created locks.
      *
-     * @param {function(Error)} callback
-     * @returns {void}
+     * @returns {Promise}
      */
-    close: (callback) => {
-      this.s3.delete(lockPath, (err) => {
-        if (err && err.statusCode !== 404) {
-          return callback(err)
+    close: async () => {
+      try {
+        await this.s3.delete(lockPath)
+      } catch (err) {
+        if (err.statusCode !== 404) {
+          throw err
         }
-
-        callback(null)
-      })
+      }
     }
   }
 
-  const cleanup = (err) => {
+  const cleanup = async (err) => {
     if (err instanceof Error) {
       console.log('\nAn Uncaught Exception Occurred:\n', err)
     } else if (err) {
@@ -84,10 +81,13 @@
 
     console.log('\nAttempting to cleanup gracefully...')
 
-    closer.close(() => {
-      console.log('Cleanup complete, exiting.')
-      process.exit()
-    })
+    try {
+      await closer.close()
+    } catch (e) {
+      console.log('Caught error cleaning up: %s', e.message)
+    }
+    console.log('Cleanup complete, exiting.')
+    process.exit()
   }
 
   // listen for graceful termination
@@ -103,19 +103,19 @@ class S3Lock {
    * Calls back on whether or not a lock exists. Override this method to customize how the check is made.
    *
    * @param {string} dir
-   * @param {function(Error, boolean)} callback
-   * @returns {void}
+   * @returns {Promise<boolean>}
    */
-  locked (dir, callback) {
-    this.s3.get(this.getLockfilePath(dir), (err, data) => {
-      if (err && err.code === 'ERR_NOT_FOUND') {
-        return callback(null, false)
-      } else if (err) {
-        return callback(err)
+  async locked (dir) {
+    try {
+      await this.s3.get(this.getLockfilePath(dir))
+    } catch (err) {
+      if (err.code === 'ERR_NOT_FOUND') {
+        return false
       }
+      throw err
+    }
 
-      callback(null, true)
-    })
+    return true
   }
 }
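With the new contract, `lock()` resolves to the lock closer instead of passing
it to a callback, and errors propagate as rejections. A rough usage sketch,
assuming the constructor receives the backing datastore (the untouched parts
of the file store it as `this.s3`) and that this runs inside an `async`
function:

    const S3Lock = require('./s3-lock')

    const s3Lock = new S3Lock(s3Datastore) // s3Datastore: the repo's S3 datastore (assumed)
    const closer = await s3Lock.lock('/path/to/repo')
    try {
      // ... work with the repo while the lock is held ...
    } finally {
      await closer.close()
    }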
diff --git a/package.json b/package.json
index dedc94b..cc6dd13 100644
--- a/package.json
+++ b/package.json
@@ -37,12 +37,9 @@
   },
   "homepage": "https://github.com/ipfs/js-datastore-s3#readme",
   "dependencies": {
-    "async": "^2.6.2",
-    "datastore-core": "~0.6.0",
-    "interface-datastore": "~0.6.0",
-    "once": "^1.4.0",
-    "pull-defer": "~0.2.3",
-    "pull-stream": "^3.6.9",
+    "datastore-core": "zcstarr/js-datastore-core",
+    "interface-datastore": "git://github.com/ipfs/interface-datastore.git#refactor/async-iterators",
+    "streaming-iterables": "^4.0.2",
     "upath": "^1.1.0"
   },
   "devDependencies": {
@@ -56,8 +53,8 @@
     "stand-in": "^4.2.0"
   },
   "peerDependencies": {
-    "ipfs-repo": "0.x",
-    "aws-sdk": "2.x"
+    "aws-sdk": "2.x",
+    "ipfs-repo": "0.x"
   },
   "contributors": [
     "Jacob Heun ",
diff --git a/src/index.js b/src/index.js
index ff995a8..742d29e 100644
--- a/src/index.js
+++ b/src/index.js
@@ -4,21 +4,18 @@
 /* :: import type {Batch, Query, QueryResult, Callback} from 'interface-datastore' */
 
 const assert = require('assert')
 const path = require('upath')
-const nextTick = require('async/nextTick')
-const once = require('once')
-const each = require('async/each')
-const waterfall = require('async/series')
-const asyncFilter = require('interface-datastore').utils.asyncFilter
-const asyncSort = require('interface-datastore').utils.asyncSort
+const {
+  filter,
+  map,
+  take
+} = require('streaming-iterables')
 const IDatastore = require('interface-datastore')
 
+const sortAll = IDatastore.utils.sortAll
 const Key = IDatastore.Key
 const Errors = IDatastore.Errors
 const createRepo = require('./s3-repo')
 
-const Deferred = require('pull-defer')
-const pull = require('pull-stream')
-
 /* ::
 export type S3DSInputOptions = {
   s3: S3Instance,
   createIfMissing: ?boolean
@@ -86,90 +83,79 @@ class S3Datastore {
    *
    * @param {Key} key
    * @param {Buffer} val
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  put (key /* : Key */, val /* : Buffer */, callback /* : Callback */) /* : void */ {
-    callback = once(callback)
-    this.opts.s3.upload({
-      Key: this._getFullKey(key),
-      Body: val
-    }, (err, data) => {
-      if (err && err.code === 'NoSuchBucket' && this.createIfMissing) {
-        return this.opts.s3.createBucket({}, (err) => {
-          if (err) return callback(err)
-          nextTick(() => this.put(key, val, callback))
-        })
-      } else if (err) {
-        return callback(Errors.dbWriteFailedError(err))
+  async put (key /* : Key */, val /* : Buffer */) /* : Promise */ {
+    try {
+      await this.opts.s3.upload({
+        Key: this._getFullKey(key),
+        Body: val
+      }).promise()
+    } catch (err) {
+      if (err.code === 'NoSuchBucket' && this.createIfMissing) {
+        await this.opts.s3.createBucket({}).promise()
+        return this.put(key, val)
       }
-
-      callback()
-    })
+      throw Errors.dbWriteFailedError(err)
+    }
   }
 
   /**
    * Read from s3.
    *
    * @param {Key} key
-   * @param {function(Error, Buffer)} callback
-   * @returns {void}
+   * @returns {Promise<Buffer>}
    */
-  get (key /* : Key */, callback /* : Callback */) /* : void */ {
-    callback = once(callback)
-    this.opts.s3.getObject({
-      Key: this._getFullKey(key)
-    }, (err, data) => {
-      if (err && err.statusCode === 404) {
-        return callback(Errors.notFoundError(err))
-      } else if (err) {
-        return callback(err)
-      }
+  async get (key /* : Key */) /* : Promise<Buffer> */ {
+    try {
+      const data = await this.opts.s3.getObject({
+        Key: this._getFullKey(key)
+      }).promise()
 
       // If a body was returned, ensure it's a Buffer
-      callback(null, data.Body ? Buffer.from(data.Body) : null)
-    })
+      return data.Body ? Buffer.from(data.Body) : null
+    } catch (err) {
+      if (err.statusCode === 404) {
+        throw Errors.notFoundError(err)
+      }
+      throw err
+    }
   }
 
   /**
    * Check for the existence of the given key.
    *
    * @param {Key} key
-   * @param {function(Error, bool)} callback
-   * @returns {void}
+   * @returns {Promise<boolean>}
    */
-  has (key /* : Key */, callback /* : Callback */) /* : void */ {
-    callback = once(callback)
-    this.opts.s3.headObject({
-      Key: this._getFullKey(key)
-    }, (err, data) => {
-      if (err && err.code === 'NotFound') {
-        return callback(null, false)
-      } else if (err) {
-        return callback(err, false)
+  async has (key /* : Key */) /* : Promise<boolean> */ {
+    try {
+      await this.opts.s3.headObject({
+        Key: this._getFullKey(key)
+      }).promise()
+      return true
+    } catch (err) {
+      if (err.code === 'NotFound') {
+        return false
       }
-
-      callback(null, true)
-    })
+      throw err
+    }
   }
 
   /**
    * Delete the record under the given key.
    *
    * @param {Key} key
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  delete (key /* : Key */, callback /* : Callback */) /* : void */ {
-    callback = once(callback)
-    this.opts.s3.deleteObject({
-      Key: this._getFullKey(key)
-    }, (err) => {
-      if (err) {
-        return callback(Errors.dbDeleteFailedError(err))
-      }
-      callback()
-    })
+  async delete (key /* : Key */) /* : Promise */ {
+    try {
+      await this.opts.s3.deleteObject({
+        Key: this._getFullKey(key)
+      }).promise()
+    } catch (err) {
+      throw Errors.dbDeleteFailedError(err)
+    }
   }
 
   /**
@@ -187,16 +173,10 @@ class S3Datastore {
       delete (key /* : Key */) /* : void */ {
         deletes.push(key)
       },
-      commit: (callback /* : (err: ?Error) => void */) => {
-        callback = once(callback)
-        waterfall([
-          (cb) => each(puts, (p, _cb) => {
-            this.put(p.key, p.value, _cb)
-          }, cb),
-          (cb) => each(deletes, (key, _cb) => {
-            this.delete(key, _cb)
-          }, cb)
-        ], (err) => callback(err))
+      commit: () /* : Promise */ => {
+        const putOps = puts.map((p) => this.put(p.key, p.value))
+        const delOps = deletes.map((key) => this.delete(key))
+        return Promise.all(putOps.concat(delOps))
       }
     }
   }
 
   /**
    * Recursively fetches all keys from s3
    * @param {Object} params
-   * @param {Array} keys
-   * @param {function} callback
-   * @returns {void}
+   * @returns {AsyncIterator<Key>}
    */
-  _listKeys (params /* : { Prefix: string, StartAfter: ?string } */, keys /* : Array */, callback /* : Callback */) {
-    if (typeof callback === 'undefined') {
-      callback = keys
-      keys = []
+  async * _listKeys (params /* : { Prefix: string, StartAfter: ?string } */) {
+    let data
+    try {
+      data = await this.opts.s3.listObjectsV2(params).promise()
+    } catch (err) {
+      throw new Error(err.code)
     }
-    callback = once(callback)
 
-    this.opts.s3.listObjectsV2(params, (err, data) => {
-      if (err) {
-        return callback(new Error(err.code))
-      }
-
-      data.Contents.forEach((d) => {
-        // Remove the path from the key
-        keys.push(new Key(d.Key.slice(this.path.length), false))
-      })
-
-      // If we didnt get all records, recursively query
-      if (data.isTruncated) {
-        // If NextMarker is absent, use the key from the last result
-        params.StartAfter = data.Contents[data.Contents.length - 1].Key
-
-        // recursively fetch keys
-        return this._listKeys(params, keys, callback)
-      }
-
-      callback(err, keys)
-    })
-  }
-
-  /**
-   * Returns an iterator for fetching objects from s3 by their key
-   * @param {Array} keys
-   * @param {Boolean} keysOnly Whether or not only keys should be returned
-   * @returns {Iterator}
-   */
-  _getS3Iterator (keys /* : Array */, keysOnly /* : boolean */) {
-    let count = 0
-
-    return {
-      next: (callback /* : Callback */) => {
-        callback = once(callback)
-
-        // Check if we're done
-        if (count >= keys.length) {
-          return callback(null, null, null)
-        }
-
-        let currentKey = keys[count++]
+    for (const d of data.Contents) {
+      // Remove the path from the key
+      yield new Key(d.Key.slice(this.path.length), false)
+    }
 
-        if (keysOnly) {
-          return callback(null, currentKey, null)
-        }
+    // If we didn't get all records, recursively query
+    if (data.isTruncated) {
+      // If NextMarker is absent, use the key from the last result
+      params.StartAfter = data.Contents[data.Contents.length - 1].Key
 
-        // Fetch the object Buffer from s3
-        this.get(currentKey, (err, data) => {
-          callback(err, currentKey, data)
-        })
-      }
+      // recursively fetch keys
+      yield * this._listKeys(params)
     }
   }
 
   /**
    * Query the store.
    *
    * @param {Object} q
-   * @returns {PullStream}
+   * @returns {AsyncIterable}
    */
   query (q /* : Query */) /* : QueryResult */ {
     const prefix = path.join(this.path, q.prefix || '')
-    let deferred = Deferred.source()
-    let iterator
+    let values = true
+    if (q.keysOnly != null) {
+      values = !q.keysOnly
+    }
 
+    // Get all the keys via list object, recursively as needed
     const params /* : Object */ = {
       Prefix: prefix
     }
+    let it = this._listKeys(params)
 
-    // this gets called recursively, the internals need to iterate
-    const rawStream = (end, callback) => {
-      callback = once(callback)
-
-      if (end) {
-        return callback(end)
-      }
-
-      iterator.next((err, key, value) => {
-        if (err) {
-          return callback(err)
-        }
-
-        // If the iterator is done, declare the stream done
-        if (err === null && key === null && value === null) {
-          return callback(true) // eslint-disable-line standard/no-callback-literal
-        }
-
-        const res /* : Object */ = {
-          key: key
-        }
-
-        if (value) {
-          res.value = value
-        }
-
-        callback(null, res)
-      })
+    if (q.prefix != null) {
+      it = filter(k => k.toString().startsWith(q.prefix), it)
     }
 
-    // Get all the keys via list object, recursively as needed
-    this._listKeys(params, [], (err, keys) => {
-      if (err) {
-        return deferred.abort(err)
-      }
+    it = map(async (key) => {
+      const res /* : QueryEntry */ = { key }
+      if (values) {
+        // Fetch the object Buffer from s3
+        res.value = await this.get(key)
+      }
+      return res
+    }, it)
 
-      iterator = this._getS3Iterator(keys, q.keysOnly || false)
-
-      deferred.resolve(rawStream)
-    })
-
-    // Use a deferred pull stream source, as async operations need to occur before the
-    // pull stream begins
-    let tasks = [deferred]
-
-    if (q.filters != null) {
-      tasks = tasks.concat(q.filters.map(f => asyncFilter(f)))
+    if (Array.isArray(q.filters)) {
+      it = q.filters.reduce((it, f) => filter(f, it), it)
     }
 
-    if (q.orders != null) {
-      tasks = tasks.concat(q.orders.map(o => asyncSort(o)))
+    if (Array.isArray(q.orders)) {
+      it = q.orders.reduce((it, f) => sortAll(it, f), it)
     }
 
     if (q.offset != null) {
       let i = 0
-      tasks.push(pull.filter(() => i++ >= q.offset))
+      it = filter(() => i++ >= q.offset, it)
     }
 
     if (q.limit != null) {
-      tasks.push(pull.take(q.limit))
+      it = take(q.limit, it)
     }
 
-    return pull.apply(null, tasks)
+    return it
   }
 
   /**
    * This will check the s3 bucket to ensure access and existence
    *
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  open (callback /* : Callback */) /* : void */ {
-    callback = once(callback)
-    this.opts.s3.headObject({
-      Key: this.path
-    }, (err, data) => {
-      if (err && err.statusCode === 404) {
-        return this.put(new Key('/', false), Buffer.from(''), callback)
+  async open () /* : Promise */ {
+    try {
+      await this.opts.s3.headObject({
+        Key: this.path
+      }).promise()
+    } catch (err) {
+      if (err.statusCode === 404) {
+        return this.put(new Key('/', false), Buffer.from(''))
       }
-      if (err) {
-        return callback(Errors.dbOpenFailedError(err))
-      }
-      callback()
-    })
+      throw Errors.dbOpenFailedError(err)
+    }
   }
 
   /**
    * Close the store.
-   *
-   * @param {function(Error)} callback
-   * @returns {void}
    */
-  close (callback /* : (err: ?Error) => void */) /* : void */ {
-    nextTick(callback)
+  close () {
   }
 }
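Since `query()` now returns an async iterable rather than a pull-stream,
results are consumed with `for await...of`. A small sketch, assuming a `store`
instance inside an `async` function:

    // all entries under a prefix, values included
    for await (const { key, value } of store.query({ prefix: '/z' })) {
      console.log(key.toString(), value.length)
    }

    // keys only, first ten entries
    for await (const { key } of store.query({ keysOnly: true, limit: 10 })) {
      console.log(key.toString())
    }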
diff --git a/src/s3-repo.js b/src/s3-repo.js
index 6545ab0..486803b 100644
--- a/src/s3-repo.js
+++ b/src/s3-repo.js
@@ -6,19 +6,11 @@ const IPFSRepo = require('ipfs-repo')
 
 // A mock lock
 const notALock = {
   getLockfilePath: () => {},
-  lock: (_, cb) => {
-    cb(null, notALock.getCloser())
-  },
-  getCloser: (_) => {
-    return {
-      close: (cb) => {
-        cb()
-      }
-    }
-  },
-  locked: (_, cb) => {
-    cb(null, false)
-  }
+  lock: (_) => notALock.getCloser(),
+  getCloser: (_) => ({
+    close: () => {}
+  }),
+  locked: (_) => false
 }
 
 /**
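Note that the mock lock's methods return plain values rather than promises.
That still satisfies `await`-based callers, because `await` passes
non-thenables through unchanged. For illustration, inside an `async` function:

    const closer = await notALock.lock('/some/repo')   // the plain closer object
    await closer.close()                               // resolves immediately
    const locked = await notALock.locked('/some/repo') // false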
diff --git a/test/index.spec.js b/test/index.spec.js
index f72e751..de12075 100644
--- a/test/index.spec.js
+++ b/test/index.spec.js
@@ -11,6 +11,7 @@ const Key = require('interface-datastore').Key
 const S3 = require('aws-sdk').S3
 
 const S3Mock = require('./utils/s3-mock')
+const { s3Resolve, s3Reject, S3Error } = S3Mock
 const S3Store = require('../src')
 const { createRepo } = require('../src')
 
@@ -35,19 +36,20 @@ describe('S3Datastore', () => {
   })
 
   describe('put', () => {
-    it('should include the path in the key', (done) => {
+    it('should include the path in the key', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'upload', function (stand, params, callback) {
+      standin.replace(s3, 'upload', function (stand, params) {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
         stand.restore()
-        callback(null)
+        return s3Resolve(null)
       })
 
-      store.put(new Key('/z/key'), Buffer.from('test data'), done)
+      return store.put(new Key('/z/key'), Buffer.from('test data'))
     })
-    it('should create the bucket when missing if createIfMissing is true', (done) => {
+
+    it('should create the bucket when missing if createIfMissing is true', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3, createIfMissing: true })
 
       // 1. On the first call upload will fail with a NoSuchBucket error
       // 2. This should result in the bucket being created
       // 3. upload is then called a second time and it passes
       let bucketCreated = false
-      standin.replace(s3, 'upload', (stand, params, callback) => {
+      standin.replace(s3, 'upload', (stand, params) => {
         if (!bucketCreated) {
-          const err = { code: 'NoSuchBucket' }
-          return callback(err)
+          return s3Reject(new S3Error('NoSuchBucket'))
         }
         stand.restore()
-        return callback(null)
+        return s3Resolve(null)
       })
 
-      standin.replace(s3, 'createBucket', (stand, params, callback) => {
+      standin.replace(s3, 'createBucket', (stand, params) => {
         bucketCreated = true
         stand.restore()
-        return callback()
+        return s3Resolve()
       })
 
-      store.put(new Key('/z/key'), Buffer.from('test data'), done)
+      return store.put(new Key('/z/key'), Buffer.from('test data'))
     })
-    it('should not create the bucket when missing if createIfMissing is false', (done) => {
+
+    it('should not create the bucket when missing if createIfMissing is false', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3, createIfMissing: false })
 
       let bucketCreated = false
-      standin.replace(s3, 'upload', (stand, params, callback) => {
+      standin.replace(s3, 'upload', (stand, params) => {
         if (!bucketCreated) {
-          const err = { code: 'NoSuchBucket' }
-          return callback(err)
+          return s3Reject(new S3Error('NoSuchBucket'))
         }
         stand.restore()
-        return callback(null)
+        return s3Resolve(null)
       })
 
-      standin.replace(s3, 'createBucket', (stand, params, callback) => {
+      standin.replace(s3, 'createBucket', (stand, params) => {
         bucketCreated = true
         stand.restore()
-        return callback()
+        return s3Resolve()
       })
 
-      store.put(new Key('/z/key'), Buffer.from('test data'), (err) => {
+      try {
+        await store.put(new Key('/z/key'), Buffer.from('test data'))
+      } catch (err) {
         expect(bucketCreated).to.equal(false)
         expect(err).to.have.property('code', 'ERR_DB_WRITE_FAILED')
-        done()
-      })
+        return
+      }
+      throw new Error('put should have failed')
     })
-    it('should return a standard error when the put fails', (done) => {
+
+    it('should return a standard error when the put fails', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'upload', function (stand, params, callback) {
+      standin.replace(s3, 'upload', function (stand, params) {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
         stand.restore()
-        callback(new Error('bad things happened'))
+        return s3Reject(new Error('bad things happened'))
       })
 
-      store.put(new Key('/z/key'), Buffer.from('test data'), (err) => {
+      try {
+        await store.put(new Key('/z/key'), Buffer.from('test data'))
+      } catch (err) {
         expect(err.code).to.equal('ERR_DB_WRITE_FAILED')
-        done()
-      })
+        return
+      }
+      throw new Error('put should have failed')
     })
   })
 
   describe('get', () => {
-    it('should include the path in the fetch key', (done) => {
+    it('should include the path in the fetch key', () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'getObject', function (stand, params, callback) {
+      standin.replace(s3, 'getObject', function (stand, params) {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
         stand.restore()
-        callback(null, { Body: Buffer.from('test') })
+        return s3Resolve({ Body: Buffer.from('test') })
       })
 
-      store.get(new Key('/z/key'), done)
+      return store.get(new Key('/z/key'))
     })
-    it('should return a standard not found error code if the key isnt found', (done) => {
+
+    it('should return a standard not found error code if the key isn't found', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'getObject', function (stand, params, callback) {
+      standin.replace(s3, 'getObject', function (stand, params) {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
         stand.restore()
-        let error = new Error('not found')
-        error.statusCode = 404
-        callback(error)
+        return s3Reject(new S3Error('NotFound', 404))
       })
 
-      store.get(new Key('/z/key'), (err) => {
+      try {
+        await store.get(new Key('/z/key'))
+      } catch (err) {
         expect(err.code).to.equal('ERR_NOT_FOUND')
-        done()
-      })
+        return
+      }
+      throw new Error('get should have failed')
     })
   })
 
   describe('delete', () => {
-    it('should return a standard delete error if deletion fails', (done) => {
+    it('should return a standard delete error if deletion fails', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'deleteObject', function (stand, params, callback) {
+      standin.replace(s3, 'deleteObject', function (stand, params) {
         expect(params.Key).to.equal('.ipfs/datastore/z/key')
         stand.restore()
-        callback(new Error('bad things'))
+        return s3Reject(new Error('bad things'))
       })
 
-      store.delete(new Key('/z/key'), (err) => {
+      try {
+        await store.delete(new Key('/z/key'))
+      } catch (err) {
         expect(err.code).to.equal('ERR_DB_DELETE_FAILED')
-        done()
-      })
+        return
+      }
+      throw new Error('delete should have failed')
     })
   })
 
   describe('open', () => {
-    it('should return a standard open error if the head request fails with an unknown error', (done) => {
+    it('should return a standard open error if the head request fails with an unknown error', async () => {
       const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
       const store = new S3Store('.ipfs/datastore', { s3 })
 
-      standin.replace(s3, 'headObject', function (stand, _, callback) {
+      standin.replace(s3, 'headObject', function (stand, _) {
         stand.restore()
-        callback(new Error('unknown'))
+        return s3Reject(new Error('unknown'))
       })
 
-      store.open((err) => {
+      try {
+        await store.open()
+      } catch (err) {
         expect(err.code).to.equal('ERR_DB_OPEN_FAILED')
-        done()
-      })
+        return
+      }
+      throw new Error('open should have failed')
     })
   })
 
@@ -199,15 +205,14 @@ describe('S3Datastore', () => {
 
   describe('interface-datastore', () => {
     require('interface-datastore/src/tests')({
-      setup (callback) {
+      setup () {
         let s3 = new S3({
           params: { Bucket: 'my-ipfs-bucket' }
         })
         S3Mock(s3)
-        callback(null, new S3Store('.ipfs/datastore', { s3 }))
+        return new S3Store('.ipfs/datastore', { s3 })
       },
-      teardown (callback) {
-        callback(null)
+      teardown () {
       }
     })
   })
diff --git a/test/utils/s3-mock.js b/test/utils/s3-mock.js
index 8e54d35..5d69265 100644
--- a/test/utils/s3-mock.js
+++ b/test/utils/s3-mock.js
@@ -13,6 +13,9 @@ class S3Error extends Error {
   }
 }
 
+const s3Resolve = (res) => ({ promise: () => Promise.resolve(res) })
+const s3Reject = (err) => ({ promise: () => Promise.reject(err) })
+
 /**
  * Mocks out the s3 calls made by datastore-s3
  * @param {S3Instance} s3
@@ -22,40 +25,37 @@ module.exports = function (s3) {
   const mocks = {}
   const storage = {}
 
-  mocks.deleteObject = standin.replace(s3, 'deleteObject', (stand, params, callback) => {
+  mocks.deleteObject = standin.replace(s3, 'deleteObject', (stand, params) => {
     expect(params.Key).to.be.a('string')
     if (storage[params.Key]) {
       delete storage[params.Key]
-      callback(null, {})
-    } else {
-      callback(new S3Error('NotFound', 404), null)
+      return s3Resolve({})
     }
+    return s3Reject(new S3Error('NotFound', 404))
   })
 
-  mocks.getObject = standin.replace(s3, 'getObject', (stand, params, callback) => {
+  mocks.getObject = standin.replace(s3, 'getObject', (stand, params) => {
     expect(params.Key).to.be.a('string')
     if (storage[params.Key]) {
-      callback(null, { Body: storage[params.Key] })
-    } else {
-      callback(new S3Error('NotFound', 404), null)
+      return s3Resolve({ Body: storage[params.Key] })
     }
+    return s3Reject(new S3Error('NotFound', 404))
  })
 
-  mocks.headBucket = standin.replace(s3, 'headBucket', (stand, params, callback) => {
+  mocks.headBucket = standin.replace(s3, 'headBucket', (stand, params) => {
     expect(params.Bucket).to.be.a('string')
-    callback(null)
+    return s3Resolve()
   })
 
-  mocks.headObject = standin.replace(s3, 'headObject', (stand, params, callback) => {
+  mocks.headObject = standin.replace(s3, 'headObject', (stand, params) => {
     expect(params.Key).to.be.a('string')
     if (storage[params.Key]) {
-      callback(null, {})
-    } else {
-      callback(new S3Error('NotFound', 404), null)
+      return s3Resolve({})
     }
+    return s3Reject(new S3Error('NotFound', 404))
   })
 
-  mocks.listObjectV2 = standin.replace(s3, 'listObjectsV2', (stand, params, callback) => {
+  mocks.listObjectV2 = standin.replace(s3, 'listObjectsV2', (stand, params) => {
     expect(params.Prefix).to.be.a('string')
     const results = {
       Contents: []
@@ -69,13 +69,17 @@ module.exports = function (s3) {
       }
     }
 
-    callback(null, results)
+    return s3Resolve(results)
   })
 
-  mocks.upload = standin.replace(s3, 'upload', (stand, params, callback) => {
+  mocks.upload = standin.replace(s3, 'upload', (stand, params) => {
     expect(params.Key).to.be.a('string')
     expect(params.Body).to.be.instanceof(Buffer)
     storage[params.Key] = params.Body
-    callback(null)
+    return s3Resolve({})
   })
 }
+
+module.exports.S3Error = S3Error
+module.exports.s3Resolve = s3Resolve
+module.exports.s3Reject = s3Reject
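For quick experiments outside the test suite, the mock can stand in for a real
bucket. A rough sketch (module paths assume it is run from the test directory):

    const S3 = require('aws-sdk').S3
    const S3Mock = require('./utils/s3-mock')
    const S3Store = require('../src')
    const { Key } = require('interface-datastore')

    async function main () {
      const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
      S3Mock(s3) // route all s3 calls to in-memory storage

      const store = new S3Store('.ipfs/datastore', { s3 })
      await store.put(new Key('/z/key'), Buffer.from('hello'))
      console.log(await store.has(new Key('/z/key'))) // true
    }

    main()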