Add cache option (sindresorhus#284)
lukechilds authored and sindresorhus committed Oct 15, 2017
1 parent 33cbb6f commit 3c79205
Showing 4 changed files with 235 additions and 45 deletions.
104 changes: 59 additions & 45 deletions index.js
@@ -7,6 +7,7 @@ const Transform = require('stream').Transform;
const urlLib = require('url');
const fs = require('fs');
const querystring = require('querystring');
const CacheableRequest = require('cacheable-request');
const duplexer3 = require('duplexer3');
const intoStream = require('into-stream');
const isStream = require('is-stream');
@@ -87,7 +88,8 @@ function requestAsEventEmitter(opts) {

let progressInterval;

const req = fn.request(opts, res => {
const cacheableRequest = new CacheableRequest(fn.request, opts.cache);
const cacheReq = cacheableRequest(opts, res => {
clearInterval(progressInterval);

ee.emit('uploadProgress', {
@@ -172,7 +174,7 @@ function requestAsEventEmitter(opts) {

const response = opts.decompress === true &&
typeof decompressResponse === 'function' &&
req.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
opts.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;

if (!opts.decompress && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) !== -1) {
opts.encoding = null;
@@ -190,62 +192,66 @@ function requestAsEventEmitter(opts) {
});
});

req.once('error', err => {
clearInterval(progressInterval);
cacheReq.on('error', err => ee.emit('error', new got.CacheError(err, opts)));

const backoff = opts.retries(++retryCount, err);
cacheReq.on('request', req => {
req.once('error', err => {
clearInterval(progressInterval);

if (backoff) {
setTimeout(get, backoff, opts);
return;
}
const backoff = opts.retries(++retryCount, err);

ee.emit('error', new got.RequestError(err, opts));
});
if (backoff) {
setTimeout(get, backoff, opts);
return;
}

ee.on('request', req => {
ee.emit('uploadProgress', {
percent: 0,
transferred: 0,
total: uploadBodySize
ee.emit('error', new got.RequestError(err, opts));
});

req.connection.on('connect', () => {
const uploadEventFrequency = 150;
ee.on('request', req => {
ee.emit('uploadProgress', {
percent: 0,
transferred: 0,
total: uploadBodySize
});

progressInterval = setInterval(() => {
const lastUploaded = uploaded;
const headersSize = Buffer.byteLength(req._header);
uploaded = req.connection.bytesWritten - headersSize;
req.connection.on('connect', () => {
const uploadEventFrequency = 150;

// Prevent the known issue of `bytesWritten` being larger than body size
if (uploadBodySize && uploaded > uploadBodySize) {
uploaded = uploadBodySize;
}
progressInterval = setInterval(() => {
const lastUploaded = uploaded;
const headersSize = Buffer.byteLength(req._header);
uploaded = req.connection.bytesWritten - headersSize;

// Don't emit events with unchanged progress and
// prevent last event from being emitted, because
// it's emitted when `response` is emitted
if (uploaded === lastUploaded || uploaded === uploadBodySize) {
return;
}
// Prevent the known issue of `bytesWritten` being larger than body size
if (uploadBodySize && uploaded > uploadBodySize) {
uploaded = uploadBodySize;
}

ee.emit('uploadProgress', {
percent: uploadBodySize ? uploaded / uploadBodySize : 0,
transferred: uploaded,
total: uploadBodySize
});
}, uploadEventFrequency);
// Don't emit events with unchanged progress and
// prevent last event from being emitted, because
// it's emitted when `response` is emitted
if (uploaded === lastUploaded || uploaded === uploadBodySize) {
return;
}

ee.emit('uploadProgress', {
percent: uploadBodySize ? uploaded / uploadBodySize : 0,
transferred: uploaded,
total: uploadBodySize
});
}, uploadEventFrequency);
});
});
});

if (opts.gotTimeout) {
clearInterval(progressInterval);
timedOut(req, opts.gotTimeout);
}
if (opts.gotTimeout) {
clearInterval(progressInterval);
timedOut(req, opts.gotTimeout);
}

setImmediate(() => {
ee.emit('request', req);
setImmediate(() => {
ee.emit('request', req);
});
});
};

@@ -434,6 +440,7 @@ function normalizeArguments(url, opts) {
{
path: '',
retries: 2,
cache: false,
decompress: true,
useElectronNet: false
},
@@ -589,6 +596,13 @@ class StdError extends Error {
}
}

got.CacheError = class extends StdError {
constructor(error, opts) {
super(error.message, error, opts);
this.name = 'CacheError';
}
};

got.RequestError = class extends StdError {
constructor(error, opts) {
super(error.message, error, opts);
1 change: 1 addition & 0 deletions package.json
@@ -50,6 +50,7 @@
"electron"
],
"dependencies": {
"cacheable-request": "^2.0.0",
"decompress-response": "^3.2.0",
"duplexer3": "^0.1.4",
"get-stream": "^3.0.0",
68 changes: 68 additions & 0 deletions readme.md
@@ -19,6 +19,7 @@ Created because [`request`](https://github.com/request/request) is bloated *(sev

- [Promise & stream API](#api)
- [Request cancelation](#aborting-the-request)
- [RFC compliant caching](#cache-adapters)
- [Follows redirects](#followredirect)
- [Retries on network failure](#retries)
- [Progress events](#onuploadprogress-progress)
@@ -69,6 +70,10 @@ It's a `GET` request by default, but can be changed in `options`.

Returns a Promise for a `response` object with a `body` property, a `url` property with the request URL or the final URL after redirects, and a `requestUrl` property with the original request URL.

The response object will normally be a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage); however, if it is returned from the cache it will be a [responselike object](https://github.com/lukechilds/responselike), which behaves in the same way.

The response also has a boolean `fromCache` property.

##### url

Type: `string` `Object`
@@ -170,6 +175,13 @@ Decompress the response automatically.

If this is disabled, a compressed response is returned as a `Buffer`. This may be useful if you want to handle decompression yourself or stream the raw compressed data.

###### cache

Type: `Object`<br>
Default: `false`

[Cache adapter instance](#cache-adapters) for storing cached data.

###### useElectronNet

Type: `boolean`<br>
@@ -253,6 +265,10 @@ Each error contains (if available) `statusCode`, `statusMessage`, `host`, `hostn

In Promise mode, the `response` is attached to the error.

#### got.CacheError

When a cache method fails, for example if the database goes down or there's a filesystem error.
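
A rough sketch of catching this error, mirroring the test added in this commit (an object that doesn't implement the Map API is passed as the cache, so the cache layer fails):

```js
const got = require('got');

(async () => {
	try {
		// A plain object doesn't implement the Map API, so cache reads/writes fail
		await got('todomvc.com', {cache: {}});
	} catch (err) {
		if (err.name === 'CacheError') {
			console.log('The cache failed:', err.message);
		}
	}
})();
```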

#### got.RequestError

When a request fails. Contains a `code` property with error class code, like `ECONNREFUSED`.
@@ -316,6 +332,58 @@ request.catch(err => {
request.cancel();
```

<a name="cache-adapters"></a>
## Cache

You can use the JavaScript `Map` type as an in-memory cache:

```js
const got = require('got');
const map = new Map();

(async () => {
	let response = await got('todomvc.com', {cache: map});
	console.log(response.fromCache);
	//=> false

	response = await got('todomvc.com', {cache: map});
	console.log(response.fromCache);
	//=> true
})();
```

Got uses [Keyv](https://github.com/lukechilds/keyv) internally to support a wide range of storage adapters. For something more scalable, you could use an [official Keyv storage adapter](https://github.com/lukechilds/keyv#official-storage-adapters):

```
npm install @keyv/redis
```

```js
const got = require('got');
const KeyvRedis = require('@keyv/redis');

const redis = new KeyvRedis('redis://user:pass@localhost:6379');

got('todomvc.com', {cache: redis});
```

Got supports anything that follows the Map API, so it's easy to write your own storage adapter or use a third-party solution.

For example, the following are all valid storage adapters:

```js
const storageAdapter = new Map();
// or
const storageAdapter = require('./my-storage-adapter');
// or
const QuickLRU = require('quick-lru');
const storageAdapter = new QuickLRU({maxSize: 1000});

got('todomvc.com', {cache: storageAdapter});
```
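
As a rough sketch of a hand-rolled adapter, assuming the cache layer only needs the Map-style `get`, `set`, `delete`, and `clear` methods (check the Keyv docs below before relying on this), a minimal in-memory adapter could look like:

```js
const got = require('got');

// Hypothetical minimal adapter; assumes only the Map-style methods below are required
class MemoryAdapter {
	constructor() {
		this.store = new Map();
	}

	get(key) {
		return this.store.get(key);
	}

	set(key, value) {
		this.store.set(key, value);
		return this;
	}

	delete(key) {
		return this.store.delete(key);
	}

	clear() {
		this.store.clear();
	}
}

got('todomvc.com', {cache: new MemoryAdapter()});
```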

View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters.


## Proxies

107 changes: 107 additions & 0 deletions test/cache.js
@@ -0,0 +1,107 @@
import test from 'ava';
import got from '../';
import {createServer} from './helpers/server';

let s;

test.before('setup', async () => {
	s = await createServer();

	let noStoreIndex = 0;
	s.on('/no-store', (req, res) => {
		res.setHeader('Cache-Control', 'public, no-cache, no-store');
		res.end(noStoreIndex.toString());
		noStoreIndex++;
	});

	let cacheIndex = 0;
	s.on('/cache', (req, res) => {
		res.setHeader('Cache-Control', 'public, max-age=60');
		res.end(cacheIndex.toString());
		cacheIndex++;
	});

	let status301Index = 0;
	s.on('/301', (req, res) => {
		if (status301Index === 0) {
			res.setHeader('Cache-Control', 'public, max-age=60');
			res.setHeader('Location', s.url + '/302');
			res.statusCode = 301;
		}
		res.end();
		status301Index++;
	});

	let status302Index = 0;
	s.on('/302', (req, res) => {
		if (status302Index === 0) {
			res.setHeader('Cache-Control', 'public, max-age=60');
			res.setHeader('Location', s.url + '/cache');
			res.statusCode = 302;
		}
		res.end();
		status302Index++;
	});

	await s.listen(s.port);
});

test('Non cacheable responses are not cached', async t => {
	const endpoint = '/no-store';
	const cache = new Map();

	const firstResponseInt = Number((await got(s.url + endpoint, {cache})).body);
	const secondResponseInt = Number((await got(s.url + endpoint, {cache})).body);

	t.is(cache.size, 0);
	t.true(firstResponseInt < secondResponseInt);
});

test('Cacheable responses are cached', async t => {
	const endpoint = '/cache';
	const cache = new Map();

	const firstResponse = await got(s.url + endpoint, {cache});
	const secondResponse = await got(s.url + endpoint, {cache});

	t.is(cache.size, 1);
	t.is(firstResponse.body, secondResponse.body);
});

test('Cached response is re-encoded to current encoding option', async t => {
	const endpoint = '/cache';
	const cache = new Map();
	const firstEncoding = 'base64';
	const secondEncoding = 'hex';

	const firstResponse = await got(s.url + endpoint, {cache, encoding: firstEncoding});
	const secondResponse = await got(s.url + endpoint, {cache, encoding: secondEncoding});

	const expectedSecondResponseBody = Buffer.from(firstResponse.body, firstEncoding).toString(secondEncoding);

	t.is(cache.size, 1);
	t.is(secondResponse.body, expectedSecondResponseBody);
});

test('Redirects are cached and re-used internally', async t => {
	const endpoint = '/301';
	const cache = new Map();

	const firstResponse = await got(s.url + endpoint, {cache});
	const secondResponse = await got(s.url + endpoint, {cache});

	t.is(cache.size, 3);
	t.is(firstResponse.body, secondResponse.body);
});

test('Cache error throws got.CacheError', async t => {
	const endpoint = '/no-store';
	const cache = {};

	const err = await t.throws(got(s.url + endpoint, {cache}));
	t.is(err.name, 'CacheError');
});

test.after('cleanup', async () => {
	await s.close();
});
