feat: encodeInto(obj, dest, [options])
rvagg committed Sep 13, 2023
1 parent 7ab7a4e commit 36a01e5
Showing 22 changed files with 194 additions and 34 deletions.
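
A minimal usage sketch of the new encodeInto API introduced here, ahead of the individual file diffs; the destination size and sample object are illustrative, and the entry-point export is shown in the cborg.js change below:

import { encode, encodeInto, decode } from 'cborg'

// allocate a reusable destination buffer once, up front
const dest = new Uint8Array(1024)

const obj = { hello: 'world', list: [1, 2, 3] }

// encode() allocates a fresh Uint8Array on every call
const a = encode(obj)

// encodeInto() writes into `dest` and returns a view over the bytes written
const b = encodeInto(obj, dest)

// both produce the same CBOR encoding
console.log(decode(a), decode(b))
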
17 changes: 15 additions & 2 deletions bench/bench.js
@@ -3,9 +3,11 @@

import assert from 'assert'
import { garbage } from 'ipld-garbage'
import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import borc from 'borc'

const WITH_CBORG_FIXED_DESTINATION = true

let writebuf = ''
const write = process.stdout
? process.stdout.write.bind(process.stdout)
@@ -27,6 +29,17 @@ function runWith (description, count, targetTime, size, options) {
return borcDecoder.decodeAll(bytes)[0]
}

let cborgEncoder = WITH_CBORG_FIXED_DESTINATION ? null : encode

const cborgEncode = (bytes) => {
if (!cborgEncoder) {
// account for initial allocation & setup time in benchmark
const fixedDestination = new Uint8Array(10 * 1024 * 1024)
cborgEncoder = (bytes) => encodeInto(bytes, fixedDestination)
}
return cborgEncoder(bytes)
}

const fixtures = []

console.log(`${description} @ ${count.toLocaleString()}`)
@@ -91,7 +104,7 @@ function runWith (description, count, targetTime, size, options) {
}

return [
cmp('encode', () => enc(encode), () => enc(borc.encode)),
cmp('encode', () => enc(cborgEncode), () => enc(borc.encode)),
cmp('decode', () => dec(decode), () => dec(borcDecode))
]
}
3 changes: 2 additions & 1 deletion cborg.js
@@ -1,4 +1,4 @@
import { encode } from './lib/encode.js'
import { encode, encodeInto } from './lib/encode.js'
import { decode, decodeFirst } from './lib/decode.js'
import { Token, Type } from './lib/token.js'

@@ -15,6 +15,7 @@ export {
decode,
decodeFirst,
encode,
encodeInto,
Token,
Type
}
8 changes: 7 additions & 1 deletion interface.ts
@@ -1,5 +1,4 @@
import { Token } from './lib/token'
import { Bl } from './lib/bl'

export type TokenOrNestedTokens = Token | Token[] | TokenOrNestedTokens[]

@@ -54,3 +53,10 @@ export interface EncodeOptions {
quickEncodeToken?: QuickEncodeToken,
typeEncoders?: { [typeName: string]: OptionalTypeEncoder }
}

export interface Bl {
chunks: (Uint8Array | number[])[];
reset(): void;
push(bytes: Uint8Array | number[]): void;
toBytes(reset?: boolean | undefined): Uint8Array;
}
40 changes: 40 additions & 0 deletions lib/bl.js
@@ -122,3 +122,43 @@ export class Bl {
return byts
}
}

export class U8Bl {
/**
* @param {Uint8Array} dest
*/
constructor (dest) {
this.dest = dest
/** @type {number} */
this.cursor = 0
/** @type {Uint8Array[]} */
this.chunks = [dest]
}

reset () {
this.cursor = 0
}

/**
* @param {Uint8Array|number[]} bytes
*/
push (bytes) {
if (this.cursor + bytes.length > this.dest.length) {
throw new Error('write out of bounds, destination buffer is too small')
}
this.dest.set(bytes, this.cursor)
this.cursor += bytes.length
}

/**
* @param {boolean} [reset]
* @returns {Uint8Array}
*/
toBytes (reset = false) {
const byts = this.dest.subarray(0, this.cursor)
if (reset) {
this.reset()
}
return byts
}
}
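
A short sketch of the new U8Bl on its own, assuming it is imported directly from lib/bl.js (it is an internal class, not re-exported from the package root), with a deliberately tiny 4-byte destination to show the bounds check:

import { U8Bl } from './lib/bl.js'

const dest = new Uint8Array(4)
const bl = new U8Bl(dest)

bl.push([0x83, 0x01, 0x02])  // write 3 bytes at cursor 0
console.log(bl.toBytes())    // Uint8Array(3) [ 131, 1, 2 ], a subarray of dest rather than a copy

bl.reset()                   // rewind the cursor; dest itself is reused
try {
  bl.push([0x01, 0x02, 0x03, 0x04, 0x05]) // 5 bytes cannot fit in the 4-byte dest
} catch (err) {
  console.log(err.message)   // 'write out of bounds, destination buffer is too small'
}
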
41 changes: 30 additions & 11 deletions lib/encode.js
@@ -1,6 +1,6 @@
import { is } from './is.js'
import { Token, Type } from './token.js'
import { Bl } from './bl.js'
import { Bl as _Bl, U8Bl } from './bl.js'
import { encodeErrPrefix } from './common.js'
import { quickEncodeToken } from './jump.js'
import { asU8A } from './byte-utils.js'
@@ -21,6 +21,7 @@ import { encodeFloat } from './7float.js'
* @typedef {import('../interface').StrictTypeEncoder} StrictTypeEncoder
* @typedef {import('../interface').TokenTypeEncoder} TokenTypeEncoder
* @typedef {import('../interface').TokenOrNestedTokens} TokenOrNestedTokens
* @typedef {import('../interface').Bl} Bl
*/

/** @type {EncodeOptions} */
@@ -46,7 +47,7 @@ export function makeCborEncoders () {

const cborEncoders = makeCborEncoders()

const buf = new Bl()
const buf = new _Bl()

/** @implements {Reference} */
class Ref {
@@ -424,9 +425,14 @@ function tokensToEncoded (buf, tokens, encoders, options) {
* @param {any} data
* @param {TokenTypeEncoder[]} encoders
* @param {EncodeOptions} options
* @param {Uint8Array} [destination]
* @returns {Uint8Array}
*/
function encodeCustom (data, encoders, options) {
function encodeCustom (data, encoders, options, destination) {
// arg ordering is different to encodeInto for backward compatibility
const hasDest = destination instanceof Uint8Array
let writeTo = hasDest ? new U8Bl(destination) : buf

const tokens = objectToTokens(data, options)
if (!Array.isArray(tokens) && options.quickEncodeToken) {
const quickBytes = options.quickEncodeToken(tokens)
@@ -436,19 +442,21 @@ function encodeCustom (data, encoders, options) {
const encoder = encoders[tokens.type.major]
if (encoder.encodedSize) {
const size = encoder.encodedSize(tokens, options)
const buf = new Bl(size)
encoder(buf, tokens, options)
if (!hasDest) {
writeTo = new _Bl(size)
}
encoder(writeTo, tokens, options)
/* c8 ignore next 4 */
// this would be a problem with encodedSize() functions
if (buf.chunks.length !== 1) {
if (writeTo.chunks.length !== 1) {
throw new Error(`Unexpected error: pre-calculated length for ${tokens} was wrong`)
}
return asU8A(buf.chunks[0])
return hasDest ? writeTo.toBytes() : asU8A(writeTo.chunks[0])
}
}
buf.reset()
tokensToEncoded(buf, tokens, encoders, options)
return buf.toBytes(true)
writeTo.reset()
tokensToEncoded(writeTo, tokens, encoders, options)
return writeTo.toBytes(true)
}

/**
@@ -461,4 +469,15 @@ function encode (data, options) {
return encodeCustom(data, cborEncoders, options)
}

export { objectToTokens, encode, encodeCustom, Ref }
/**
* @param {any} data
* @param {Uint8Array} destination
* @param {EncodeOptions} [options]
* @returns {Uint8Array}
*/
function encodeInto (data, destination, options) {
options = Object.assign({}, defaultEncodeOptions, options)
return encodeCustom(data, cborEncoders, options, destination)
}

export { objectToTokens, encode, encodeCustom, encodeInto, Ref }
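
Because U8Bl#toBytes returns dest.subarray(0, cursor), the Uint8Array that encodeInto returns is a view over the destination rather than a copy, so a caller that reuses the buffer should consume or copy the result before the next call. A hedged sketch of that calling pattern (the function name and buffer size are illustrative):

import { encodeInto } from 'cborg'

const dest = new Uint8Array(64 * 1024) // reused for every encode

function encodeAndSend (obj, send) {
  // `view` aliases `dest`; the next encodeInto call overwrites it from offset 0
  const view = encodeInto(obj, dest)
  send(view) // consume immediately, or copy with view.slice() to keep it around
}

encodeAndSend({ a: 1 }, (bytes) => console.log(bytes.length))
encodeAndSend({ b: [1, 2, 3] }, (bytes) => console.log(bytes.length))
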
7 changes: 6 additions & 1 deletion test/test-0uint.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -27,6 +27,8 @@ const fixtures = [
{ data: '1bffffffffffffffff', expected: BigInt('18446744073709551615'), type: 'uint64' }
]

const fixedDest = new Uint8Array(1024)

describe('uint', () => {
describe('decode', () => {
for (const fixture of fixtures) {
@@ -53,8 +55,10 @@
it(`should encode ${fixture.type}=${fixture.expected}`, () => {
if (fixture.strict === false) {
assert.notStrictEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type} !strict`)
assert.notStrictEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type}`)
}
})
}
@@ -66,6 +70,7 @@
if (fixture.strict !== false) {
it(`should roundtrip ${fixture.type}=${fixture.expected}`, () => {
assert.ok(decode(encode(fixture.expected)) === fixture.expected, `roundtrip ${fixture.type}`)
assert.ok(decode(encodeInto(fixture.expected, fixedDest)) === fixture.expected, `roundtrip ${fixture.type}`)
})
}
}
7 changes: 6 additions & 1 deletion test/test-1negint.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -28,6 +28,8 @@ const fixtures = [
{ data: '3bffffffffffffffff', expected: BigInt('-18446744073709551616'), type: 'negint64' }
]

const fixedDest = new Uint8Array(1024)

describe('negint', () => {
describe('decode', () => {
for (const fixture of fixtures) {
@@ -48,8 +50,10 @@
it(`should encode ${fixture.type}=${fixture.expected}`, () => {
if (fixture.strict === false) {
assert.notStrictEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type} !strict`)
assert.notStrictEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type}`)
}
})
}
@@ -60,6 +64,7 @@
for (const fixture of fixtures) {
it(`should roundtrip ${fixture.type}=${fixture.expected}`, () => {
assert.ok(decode(encode(fixture.expected)) === fixture.expected, `roundtrip ${fixture.type}`)
assert.ok(decode(encodeInto(fixture.expected, fixedDest)) === fixture.expected, `roundtrip ${fixture.type}`)
})
}
})
6 changes: 5 additions & 1 deletion test/test-2bytes.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { useBuffer, fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -42,6 +42,8 @@
}
]

const fixedDest = new Uint8Array(65536 + 8)

// fill up byte arrays we can validate in strict mode, the minimal size for each
// excluding 64-bit because 4G is just too big
;(() => {
Expand Down Expand Up @@ -111,8 +113,10 @@ describe('bytes', () => {
assert.throws(() => encode(data), Error, /^CBOR encode error: number too large to encode \(-\d+\)$/)
} else if (fixture.strict === false) {
assert.notStrictEqual(toHex(encode(data)), expectedHex, `encode ${fixture.type} !strict`)
assert.notStrictEqual(toHex(encodeInto(data, fixedDest)), expectedHex, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(data)), expectedHex, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(data, fixedDest)), expectedHex, `encode ${fixture.type}`)
}
})
}
6 changes: 5 additions & 1 deletion test/test-3string.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -52,6 +52,8 @@
}
]

const fixedDest = new Uint8Array(65536 + 8)

// fill up byte arrays converted to strings so we can validate in strict mode,
// the minimal size for each excluding 64-bit because 4G is just too big
;(() => {
@@ -127,8 +129,10 @@ describe('string', () => {
assert.throws(() => encode(data), Error, /^CBOR encode error: number too large to encode \(-\d+\)$/)
} else if (fixture.strict === false) {
assert.notStrictEqual(toHex(encode(data)), expectedHex, `encode ${fixture.type} !strict`)
assert.notStrictEqual(toHex(encodeInto(data, fixedDest)), expectedHex, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(data)), expectedHex, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(data, fixedDest)), expectedHex, `encode ${fixture.type}`)
}
})
}
6 changes: 5 additions & 1 deletion test/test-4array.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -35,6 +35,8 @@
{ data: '9b000000000000000403040506', expected: [3, 4, 5, 6], type: 'array 4 ints, length64', strict: false }
]

const fixedDest = new Uint8Array(1024)

describe('array', () => {
describe('decode', () => {
for (const fixture of fixtures) {
@@ -63,8 +65,10 @@
assert.throws(encode.bind(null, fixture.expected), Error, /^CBOR encode error: number too large to encode \(\d+\)$/)
} else if (fixture.strict === false) {
assert.notDeepEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type} !strict`)
assert.notDeepEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(fixture.expected)), fixture.data, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(fixture.expected, fixedDest)), fixture.data, `encode ${fixture.type}`)
}
})
}
8 changes: 5 additions & 3 deletions test/test-5map.js
@@ -2,7 +2,7 @@

import chai from 'chai'

import { decode, encode } from '../cborg.js'
import { decode, encode, encodeInto } from '../cborg.js'
import { fromHex, toHex } from '../lib/byte-utils.js'

const { assert } = chai
@@ -133,6 +133,8 @@
{ data: 'bb0000000000000001616101', expected: { a: 1 }, type: 'map 1 pair, length64', strict: false }
]

const fixedDest = new Uint8Array(1024)

function toMap (arr) {
const m = new Map()
for (const [key, value] of arr) {
@@ -205,9 +207,9 @@ describe('map', () => {
if (fixture.unsafe) {
assert.throws(encode.bind(null, toEncode), Error, /^CBOR encode error: number too large to encode \(\d+\)$/)
} else if (fixture.strict === false || fixture.roundtrip === false) {
assert.notDeepEqual(toHex(encode(toEncode)), fixture.data, `encode ${fixture.type} !strict`)
assert.notDeepEqual(toHex(encodeInto(toEncode, fixedDest)), fixture.data, `encode ${fixture.type} !strict`)
} else {
assert.strictEqual(toHex(encode(toEncode)), fixture.data, `encode ${fixture.type}`)
assert.strictEqual(toHex(encodeInto(toEncode, fixedDest)), fixture.data, `encode ${fixture.type}`)
}
})
}

