Skip to content
This repository was archived by the owner on Mar 23, 2023. It is now read-only.

Commit 6c74394

Browse files
achingbrain authored and jacobheun committed
feat: adds interface-datastore streaming api
Uses the Adapter from interface-datastore to support the new streaming api for puts/get/etc.
1 parent 4251456 commit 6c74394

File tree

2 files changed

+19
-49
lines changed

2 files changed

+19
-49
lines changed

package.json

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,19 +36,18 @@
3636
},
3737
"homepage": "https://github.com/ipfs/js-datastore-s3#readme",
3838
"dependencies": {
39-
"datastore-core": "^0.7.0",
40-
"interface-datastore": "^0.7.0",
41-
"streaming-iterables": "^4.1.0",
39+
"datastore-core": "^1.1.0",
40+
"interface-datastore": "^1.0.2",
4241
"upath": "^1.1.0"
4342
},
4443
"devDependencies": {
45-
"aegir": "^20.4.1",
44+
"aegir": "^22.0.0",
4645
"aws-sdk": "^2.579.0",
4746
"chai": "^4.2.0",
4847
"dirty-chai": "^2.0.1",
4948
"flow-bin": "^0.93.0",
5049
"flow-typed": "^2.5.1",
51-
"ipfs-repo": "^0.29.2",
50+
"ipfs-repo": "^2.1.1",
5251
"stand-in": "^4.2.0"
5352
},
5453
"peerDependencies": {

src/index.js

Lines changed: 15 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,15 @@
44
/* :: import type {Batch, Query, QueryResult, Callback} from 'interface-datastore' */
55
const assert = require('assert')
66
const path = require('upath')
7-
const {
8-
filter,
9-
map,
10-
take
11-
} = require('streaming-iterables')
127

13-
const IDatastore = require('interface-datastore')
14-
const sortAll = IDatastore.utils.sortAll
15-
const Key = IDatastore.Key
16-
const Errors = IDatastore.Errors
8+
const {
9+
Adapter,
10+
Key,
11+
Errors,
12+
utils: {
13+
filter
14+
}
15+
} = require('interface-datastore')
1716
const createRepo = require('./s3-repo')
1817

1918
/* :: export type S3DSInputOptions = {
@@ -42,13 +41,15 @@ declare type S3Instance = {
4241
* Keys need to be sanitized before use, as they are written
4342
* to the file system as is.
4443
*/
45-
class S3Datastore {
44+
class S3Datastore extends Adapter {
4645
/* :: path: string */
4746
/* :: opts: S3DSInputOptions */
4847
/* :: bucket: string */
4948
/* :: createIfMissing: boolean */
5049

5150
constructor (path /* : string */, opts /* : S3DSInputOptions */) {
51+
super()
52+
5253
this.path = path
5354
this.opts = opts
5455
const {
@@ -209,13 +210,7 @@ class S3Datastore {
209210
}
210211
}
211212

212-
/**
213-
* Query the store.
214-
*
215-
* @param {Object} q
216-
* @returns {Iterable}
217-
*/
218-
query (q /* : Query<Buffer> */) /* : QueryResult<Buffer> */ {
213+
async * _all (q, options) {
219214
const prefix = path.join(this.path, q.prefix || '')
220215

221216
let values = true
@@ -230,36 +225,18 @@ class S3Datastore {
230225
let it = this._listKeys(params)
231226

232227
if (q.prefix != null) {
233-
it = filter(k => k.toString().startsWith(q.prefix), it)
228+
it = filter(it, k => k.toString().startsWith(q.prefix))
234229
}
235230

236-
it = map(async (key) => {
231+
for await (const key of it) {
237232
const res /* : QueryEntry<Buffer> */ = { key }
238233
if (values) {
239234
// Fetch the object Buffer from s3
240235
res.value = await this.get(key)
241236
}
242-
return res
243-
}, it)
244-
245-
if (Array.isArray(q.filters)) {
246-
it = q.filters.reduce((it, f) => filter(f, it), it)
247-
}
248237

249-
if (Array.isArray(q.orders)) {
250-
it = q.orders.reduce((it, f) => sortAll(it, f), it)
238+
yield res
251239
}
252-
253-
if (q.offset != null) {
254-
let i = 0
255-
it = filter(() => i++ >= q.offset, it)
256-
}
257-
258-
if (q.limit != null) {
259-
it = take(q.limit, it)
260-
}
261-
262-
return it
263240
}
264241

265242
/**
@@ -280,12 +257,6 @@ class S3Datastore {
280257
throw Errors.dbOpenFailedError(err)
281258
}
282259
}
283-
284-
/**
285-
* Close the store.
286-
*/
287-
close () {
288-
}
289260
}
290261

291262
module.exports = S3Datastore

0 commit comments

Comments (0)