Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Revamp #83

Merged
merged 18 commits into from
May 7, 2021
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,19 @@
Changelog
=========

## v3.0.0
- Switched from a WeakMap to private class fields (requires Node.js 12)
- Switch to ESM
- blob.stream() now returns a subset of the WHATWG stream interface, namely an async iterable
  (it no longer returns a Node.js stream)
- Reduced the dependency on Buffer by switching to the global TextEncoder/TextDecoder (requires Node.js 11)
- Disabled xo since it could not understand private fields (#)
- No longer transform the type to lowercase (https://github.com/w3c/FileAPI/issues/43)
  This is loose rather than strict: keys should be lowercased, but values should not be.
  Doing it properly would require a real MIME type parser, so we just made it loose.
- index.js can now be imported by browsers & Deno since it no longer depends on any
  core Node.js features (but why would you? other environments can benefit from it)

## v2.1.1
- Add nullish values checking in Symbol.hasInstance (#82)
- Add generated typings for from.js file (#80)
Expand Down
9 changes: 5 additions & 4 deletions from.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
const {statSync, createReadStream} = require('fs');
const Blob = require('./index.js');
const DOMException = require('domexception');
import {statSync, createReadStream} from 'fs';
import DOMException from 'domexception';
import Blob from './index.js';

/**
* @param {string} path filepath on the disk
Expand Down Expand Up @@ -54,4 +54,5 @@ class BlobDataItem {
}
}

module.exports = blobFrom;
export default blobFrom;
export {Blob};
119 changes: 72 additions & 47 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,73 +1,61 @@
const {Readable} = require('stream');

/**
* @type {WeakMap<Blob, {type: string, size: number, parts: (Blob | Buffer)[] }>}
*/
const wm = new WeakMap();

async function * read(parts) {
for (const part of parts) {
if ('stream' in part) {
yield * part.stream();
} else {
yield part;
}
}
}
// 64 KiB (same size that Chrome slices its blobs into Uint8Arrays)
const POOL_SIZE = 65536;

class Blob {

/** @type {Array.<(Blob|Uint8Array)>} */
#parts = [];
#type = '';
#size = 0;
#avoidClone = false

/**
* The Blob() constructor returns a new Blob object. The content
* of the blob consists of the concatenation of the values given
* in the parameter array.
*
* @param {(ArrayBufferLike | ArrayBufferView | Blob | Buffer | string)[]} blobParts
* @param {(ArrayBufferLike | ArrayBufferView | Blob | string)[]} blobParts
jimmywarting marked this conversation as resolved.
Show resolved Hide resolved
* @param {{ type?: string }} [options]
*/
constructor(blobParts = [], options = {}) {
let size = 0;

const parts = blobParts.map(element => {
let buffer;
if (element instanceof Buffer) {
buffer = element;
} else if (ArrayBuffer.isView(element)) {
buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
let part;
if (ArrayBuffer.isView(element)) {
part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength));
} else if (element instanceof ArrayBuffer) {
buffer = Buffer.from(element);
part = new Uint8Array(element.slice(0));
} else if (element instanceof Blob) {
buffer = element;
part = element;
} else {
buffer = Buffer.from(typeof element === 'string' ? element : String(element));
part = new TextEncoder().encode(String(element));
}

// eslint-disable-next-line unicorn/explicit-length-check
size += buffer.length || buffer.size || 0;
return buffer;
size += ArrayBuffer.isView(part) ? part.byteLength : part.size;
return part;
});

const type = options.type === undefined ? '' : String(options.type).toLowerCase();
const type = options.type === undefined ? '' : String(options.type);

wm.set(this, {
type: /[^\u0020-\u007E]/.test(type) ? '' : type,
size,
parts
});
this.#type = /[^\u0020-\u007E]/.test(type) ? '' : type;
this.#size = size;
this.#parts = parts;
}

/**
* The Blob interface's size property returns the
* size of the Blob in bytes.
*/
get size() {
return wm.get(this).size;
return this.#size;
}

/**
* The type property of a Blob object returns the MIME type of the file.
*/
get type() {
return wm.get(this).type;
return this.#type;
}

/**
Expand All @@ -78,7 +66,17 @@ class Blob {
* @return {Promise<string>}
*/
async text() {
return Buffer.from(await this.arrayBuffer()).toString();
this.#avoidClone = true
// More optimized than using this.arrayBuffer()
// that requires twice as much ram
const decoder = new TextDecoder();
let str = '';
for await (let part of this.stream()) {
str += decoder.decode(part, { stream: true });
}
// Remaining
str += decoder.decode();
return str;
}

/**
Expand All @@ -89,6 +87,7 @@ class Blob {
* @return {Promise<ArrayBuffer>}
*/
async arrayBuffer() {
this.#avoidClone = true
const data = new Uint8Array(this.size);
let offset = 0;
for await (const chunk of this.stream()) {
Expand All @@ -100,13 +99,31 @@ class Blob {
}

/**
* The Blob interface's stream() method is difference from native
* and uses node streams instead of whatwg streams.
* The Blob stream() implements partial support of the whatwg stream
* by being only async iterable.
*
* @returns {Readable} Node readable stream
* @returns {AsyncGenerator<Uint8Array>}
*/
stream() {
return Readable.from(read(wm.get(this).parts));
async * stream() {
jimmywarting marked this conversation as resolved.
Show resolved Hide resolved
for (let part of this.#parts) {
if ('stream' in part) {
yield * part.stream();
} else {
if (this.#avoidClone) {
yield part
} else {
let position = part.byteOffset;
let end = part.byteOffset + part.byteLength;
while (position !== end) {
jimmywarting marked this conversation as resolved.
Show resolved Hide resolved
const size = Math.min(end - position, POOL_SIZE);
const chunk = part.buffer.slice(position, position + size);
yield new Uint8Array(chunk);
position += chunk.byteLength;
}
}
}
}
this.#avoidClone = false
}

/**
Expand All @@ -125,7 +142,7 @@ class Blob {
let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size);

const span = Math.max(relativeEnd - relativeStart, 0);
const parts = wm.get(this).parts.values();
const parts = this.#parts;
const blobParts = [];
let added = 0;

Expand All @@ -137,9 +154,15 @@ class Blob {
relativeStart -= size;
relativeEnd -= size;
} else {
const chunk = part.slice(relativeStart, Math.min(size, relativeEnd));
let chunk
if (ArrayBuffer.isView(part)) {
chunk = part.subarray(relativeStart, Math.min(size, relativeEnd));
added += chunk.byteLength
} else {
chunk = part.slice(relativeStart, Math.min(size, relativeEnd));
added += chunk.size
}
blobParts.push(chunk);
added += ArrayBuffer.isView(chunk) ? chunk.byteLength : chunk.size;
relativeStart = 0; // All next sequental parts should start at 0

// don't add the overflow to new blobParts
Expand All @@ -150,7 +173,8 @@ class Blob {
}

const blob = new Blob([], {type: String(type).toLowerCase()});
Object.assign(wm.get(blob), {size: span, parts: blobParts});
blob.#size = span;
blob.#parts = blobParts;

return blob;
}
Expand All @@ -177,4 +201,5 @@ Object.defineProperties(Blob.prototype, {
slice: {enumerable: true}
});

module.exports = Blob;
export default Blob;
export { Blob };
jimmywarting marked this conversation as resolved.
Show resolved Hide resolved
10 changes: 5 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
{
"name": "fetch-blob",
"version": "2.1.2",
"version": "3.0.0",
"description": "A Blob implementation in Node.js, originally from node-fetch.",
"main": "index.js",
"type": "module",
"files": [
"from.js",
"index.js",
"index.d.ts",
"from.d.ts"
],
"scripts": {
"lint": "xo",
"test": "xo && ava",
"lint": "xo test.js from.js",
"test": "npm run lint && ava",
"report": "c8 --reporter json --reporter text ava",
"coverage": "c8 --reporter json --reporter text ava && codecov -f coverage/coverage-final.json",
"prepublishOnly": "tsc --declaration --emitDeclarationOnly --allowJs index.js from.js"
Expand All @@ -22,7 +23,7 @@
"node-fetch"
],
"engines": {
"node": "^10.17.0 || >=12.3.0"
"node": ">=12.3.0"
},
"author": "David Frank",
"license": "MIT",
Expand Down Expand Up @@ -61,7 +62,6 @@
"c8": "^7.7.1",
"codecov": "^3.8.1",
"domexception": "^2.0.1",
"get-stream": "^6.0.1",
"node-fetch": "^2.6.1",
"typescript": "^4.2.4",
"xo": "^0.38.2"
Expand Down
57 changes: 46 additions & 11 deletions test.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
const fs = require('fs');
const test = require('ava');
const getStream = require('get-stream');
const {Response} = require('node-fetch');
const {TextDecoder} = require('util');
const Blob = require('./index.js');
const blobFrom = require('./from.js');
import fs from 'fs';
import test from 'ava';
import {Response} from 'node-fetch';
import {Readable} from 'stream';
import Blob from './index.js';
import blobFrom from './from.js';

test('new Blob()', t => {
const blob = new Blob(); // eslint-disable-line no-unused-vars
Expand Down Expand Up @@ -81,8 +80,10 @@ test('Blob stream()', async t => {
const data = 'a=1';
const type = 'text/plain';
const blob = new Blob([data], {type});
const result = await getStream(blob.stream());
t.is(result, data);

for await (const chunk of blob.stream()) {
t.is(chunk.join(), [97, 61, 49].join());
}
});

test('Blob toString()', t => {
Expand Down Expand Up @@ -131,7 +132,7 @@ test('Blob works with node-fetch Response.blob()', async t => {
const data = 'a=1';
const type = 'text/plain';
const blob = new Blob([data], {type});
const response = new Response(blob);
const response = new Response(Readable.from(blob.stream()));
const blob2 = await response.blob();
t.is(await blob2.text(), data);
});
Expand All @@ -140,7 +141,7 @@ test('Blob works with node-fetch Response.text()', async t => {
const data = 'a=1';
const type = 'text/plain';
const blob = new Blob([data], {type});
const response = new Response(blob);
const response = new Response(Readable.from(blob.stream()));
const text = await response.text();
t.is(text, data);
});
Expand Down Expand Up @@ -178,3 +179,37 @@ test('Blob-ish class is an instance of Blob', t => {
test('Instanceof check returns false for nullish values', t => {
t.false(null instanceof Blob);
});

test('Dose not lowercase the blob type', t => {
const type = 'multipart/form-data; boundary=----WebKitFormBoundaryTKqdrVt01qOBltBd';
t.is(new Blob([], {type}).type, type);
});
jimmywarting marked this conversation as resolved.
Show resolved Hide resolved

test('Parts are immutable', async t => {
const buf = new Uint8Array([97]);
const blob = new Blob([buf]);
buf[0] = 98;
t.is(await blob.text(), 'a');
});

test('Blobs are immutable', async t => {
const buf = new Uint8Array([97]);
const blob = new Blob([buf]);
const chunk = await blob.stream().next();
t.is(chunk.value[0], 97);
chunk.value[0] = 98;
t.is(await blob.text(), 'a');
});

// This was necessary to avoid large ArrayBuffer clones (slice)
test('Large chunks are divided into smaller chunks', async t => {
const buf = new Uint8Array(65590);
const blob = new Blob([buf]);
let i = 0;
// eslint-disable-next-line no-unused-vars
for await (const chunk of blob.stream()) {
i++;
}

t.is(i === 2, true);
});