diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 0bc3b42..d401a77 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,7 @@ updates: schedule: interval: daily time: "10:00" - open-pull-requests-limit: 10 + open-pull-requests-limit: 20 commit-message: prefix: "deps" prefix-development: "deps(dev)" diff --git a/.github/workflows/js-test-and-release.yml b/.github/workflows/js-test-and-release.yml index 2c7a14b..359eb97 100644 --- a/.github/workflows/js-test-and-release.yml +++ b/.github/workflows/js-test-and-release.yml @@ -9,7 +9,9 @@ on: permissions: contents: write + id-token: write packages: write + pull-requests: write concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }} diff --git a/.github/workflows/semantic-pull-request.yml b/.github/workflows/semantic-pull-request.yml new file mode 100644 index 0000000..bd00f09 --- /dev/null +++ b/.github/workflows/semantic-pull-request.yml @@ -0,0 +1,12 @@ +name: Semantic PR + +on: + pull_request_target: + types: + - opened + - edited + - synchronize + +jobs: + main: + uses: pl-strflt/.github/.github/workflows/reusable-semantic-pull-request.yml@v0.3 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000..16d65d7 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,13 @@ +name: Close and mark stale issue + +on: + schedule: + - cron: '0 0 * * *' + +permissions: + issues: write + pull-requests: write + +jobs: + stale: + uses: pl-strflt/.github/.github/workflows/reusable-stale-issue.yml@v0.3 diff --git a/.gitignore b/.gitignore index 32b255b..7ad9e67 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,9 @@ node_modules -sandbox.js -package-lock.json +build dist -.coverage \ No newline at end of file +.docs +.coverage +node_modules +package-lock.json +yarn.lock +.vscode diff --git a/README.md b/README.md index 5eb60c4..99803f3 100644 --- a/README.md +++ b/README.md @@ -1,43 +1,41 @@ -# it-tar +# it-tar [![codecov](https://img.shields.io/codecov/c/github/alanshaw/it-tar.svg?style=flat-square)](https://codecov.io/gh/alanshaw/it-tar) -[![CI](https://img.shields.io/github/workflow/status/alanshaw/it-tar/test%20&%20maybe%20release/master?style=flat-square)](https://github.com/alanshaw/it-tar/actions/workflows/js-test-and-release.yml) +[![CI](https://img.shields.io/github/actions/workflow/status/alanshaw/it-tar/js-test-and-release.yml?branch=master\&style=flat-square)](https://github.com/alanshaw/it-tar/actions/workflows/js-test-and-release.yml?query=branch%3Amaster) -> it-tar is a streaming tar parser (and maybe a generator in the future) and nothing else. It operates purely using async iterables which means you can easily extract/parse tarballs without ever hitting the file system. +> it-tar is a streaming tar parser and generator. It operates purely using async iterables which means you can easily extract/parse tarballs without ever hitting the file system. -## Table of contents +# About -- [Install](#install) -- [Usage](#usage) - - [Packing](#packing) - - [Extracting](#extracting) - - [Headers](#headers) -- [Modifying existing tarballs](#modifying-existing-tarballs) -- [Related](#related) -- [Contribute](#contribute) -- [License](#license) -- [Contribution](#contribution) + `it-tar` [packs](#packing) and [extracts](#extracts) tarballs. -It implementes USTAR with additional support for pax extended headers. 
It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) +It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) -### Packing +## Example - Packing To create a pack stream use `tar.pack()` and pipe entries to it. -```js -const Tar = require('it-tar') +```TypeScript +import fs from 'node:fs' +import Tar from 'it-tar' import { pipe } from 'it-pipe' -const toIterable = require('stream-to-it') +// @ts-expect-error no types +import { sink } from 'stream-to-it' await pipe( [ @@ -51,25 +49,25 @@ await pipe( header: { name: 'my-stream-test.txt', size: 11 }, body: fs.createReadStream('./my-stream-test.txt') } - ] + ], Tar.pack(), // pipe the pack stream somewhere - toIterable.sink(process.stdout) + sink(process.stdout) ) ``` -### Extracting +## Example - Extracting To extract a stream use `tar.extract()` and pipe a [source iterable](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#source-it) to it. -```js -const Tar = require('it-tar') +```TypeScript +import Tar from 'it-tar' import { pipe } from 'it-pipe' await pipe( - source, // An async iterable (for example a Node.js readable stream) + [Uint8Array.from([0, 1, 2, 3, 4])], // An async iterable (for example a Node.js readable stream) Tar.extract(), - source => { + async source => { for await (const entry of source) { // entry.header is the tar header (see below) // entry.body is the content body (might be an empty async iterable) @@ -84,7 +82,35 @@ await pipe( The tar archive is streamed sequentially, meaning you **must** drain each entry's body as you get them or else the main extract stream will receive backpressure and stop reading. -Note that the body stream yields [`BufferList`](https://npm.im/bl) objects **not** `Buffer`s. +Note that the body stream yields [`Uint8ArrayList`](https://npm.im/uint8arraylist) objects **not** `Uint8Arrays`s. + +## Example - Modifying existing tarballs + +Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball. + +```TypeScript +import Tar from 'it-tar' +import { pipe } from 'it-pipe' +// @ts-expect-error no types +import { sink } from 'stream-to-it' +import fs from 'node:fs' +import path from 'node:path' + +await pipe( + fs.createReadStream('./old-tarball.tar'), + Tar.extract(), + async function * (source) { + for await (const entry of source) { + // let's prefix all names with 'tmp' + entry.header.name = path.join('tmp', entry.header.name) + // write the new entry to the pack stream + yield entry + } + }, + Tar.pack(), + sink(fs.createWriteStream('./new-tarball.tar')) +) +``` #### Headers @@ -110,48 +136,29 @@ Most of these values can be found by stat'ing a file. } ``` -## Modifying existing tarballs - -Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball. 
- -```js -const Tar = require('it-tar') -import { pipe } from 'it-pipe' -const toIterable = require('stream-to-it') - -await pipe( - fs.createReadStream('./old-tarball.tar'), - Tar.extract(), - async function * (source) { - for await (const entry of source) { - // let's prefix all names with 'tmp' - entry.header.name = path.join('tmp', entry.header.name) - // write the new entry to the pack stream - yield entry - } - }, - Tar.pack(), - toIterable.sink(fs.createWriteStream('./new-tarball.tar')) -) -``` - ## Related - [`it-pipe`](https://www.npmjs.com/package/it-pipe) Utility to "pipe" async iterables together - [`it-reader`](https://www.npmjs.com/package/it-reader) Read an exact number of bytes from a binary (async) iterable - [`stream-to-it`](https://www.npmjs.com/package/stream-to-it) Convert Node.js streams to streaming iterables -## Contribute +# Install + +```console +$ npm i it-tar +``` + +# API Docs -Feel free to dive in! [Open an issue](https://github.com/alanshaw/it-tar/issues/new) or submit PRs. +- -## License +# License Licensed under either of - Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) - MIT ([LICENSE-MIT](LICENSE-MIT) / ) -## Contribution +# Contribution Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/package.json b/package.json index a110dd5..96be6b3 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,10 @@ "bugs": { "url": "https://github.com/alanshaw/it-tar/issues" }, + "publishConfig": { + "access": "public", + "provenance": true + }, "keywords": [ "extract", "generate", @@ -28,15 +32,11 @@ "tar", "tarball" ], - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - }, "type": "module", "types": "./dist/src/index.d.ts", "files": [ "src", - "dist/src", + "dist", "!dist/test", "!**/*.tsbuildinfo" ], @@ -49,6 +49,7 @@ "eslintConfig": { "extends": "ipfs", "parserOptions": { + "project": true, "sourceType": "module" } }, @@ -140,7 +141,9 @@ "scripts": { "clean": "aegir clean", "lint": "aegir lint", + "docs": "aegir docs", "dep-check": "aegir dep-check", + "doc-check": "aegir doc-check", "build": "aegir build", "postbuild": "cp ./test/fixtures/*.tar ./test/fixtures/*.tar.gz ./test/fixtures/*.tgz ./dist/test/fixtures", "test": "aegir test -t node", @@ -151,16 +154,19 @@ "dependencies": { "iso-constants": "^0.1.2", "it-reader": "^6.0.1", - "it-stream-types": "^1.0.4", + "it-stream-types": "^2.0.1", "it-to-buffer": "^4.0.5", "p-defer": "^4.0.0", "uint8arraylist": "^2.3.2", "uint8arrays": "^5.0.2" }, "devDependencies": { - "aegir": "^37.5.1", - "concat-stream": "^2.0.0", + "aegir": "^42.2.4", "it-pipe": "^3.0.1", "stream-to-it": "^0.2.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" } } diff --git a/src/extract-headers.ts b/src/extract-headers.ts index dc311ef..44a3d9f 100644 --- a/src/extract-headers.ts +++ b/src/extract-headers.ts @@ -1,8 +1,8 @@ import { Uint8ArrayList, isUint8ArrayList } from 'uint8arraylist' -import { SupportedEncodings, toString as uint8ArrayToString } from 'uint8arrays/to-string' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { compare as uint8ArrayCompare } from 'uint8arrays/compare' -import type { TarEntryHeader } from '.' 
+import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { type SupportedEncodings, toString as uint8ArrayToString } from 'uint8arrays/to-string' +import type { EntryType, TarEntryHeader } from './index.js' const ZERO_OFFSET = '0'.charCodeAt(0) const USTAR_MAGIC = uint8ArrayFromString('ustar\x00', 'binary') @@ -11,7 +11,7 @@ const GNU_VER = uint8ArrayFromString('\x20\x00', 'binary') const MAGIC_OFFSET = 257 const VERSION_OFFSET = 263 -const clamp = function (index: number, len: number, defaultValue: number) { +const clamp = function (index: number, len: number, defaultValue: number): number { if (typeof index !== 'number') return defaultValue index = ~~index // Coerce to integer. if (index >= len) return len @@ -21,7 +21,7 @@ const clamp = function (index: number, len: number, defaultValue: number) { return 0 } -const toType = function (flag: number) { +const toType = function (flag: number): EntryType | undefined { switch (flag) { case 0: return 'file' @@ -53,14 +53,14 @@ const toType = function (flag: number) { } } -const indexOf = function (block: Uint8ArrayList, num: number, offset: number, end: number) { +const indexOf = function (block: Uint8ArrayList, num: number, offset: number, end: number): number { for (; offset < end; offset++) { if (block.get(offset) === num) return offset } return end } -const cksum = function (block: Uint8ArrayList) { +const cksum = function (block: Uint8ArrayList): number { let sum = 8 * 32 for (let i = 0; i < 148; i++) sum += block.get(i) for (let j = 156; j < 512; j++) sum += block.get(j) @@ -133,16 +133,16 @@ const decodeOct = function (val: Uint8ArrayList, offset: number, length: number) } } -const decodeStr = function (val: Uint8ArrayList, offset: number, length: number, encoding?: SupportedEncodings) { +const decodeStr = function (val: Uint8ArrayList, offset: number, length: number, encoding?: SupportedEncodings): string { return uint8ArrayToString(val.subarray(offset, indexOf(val, 0, offset, offset + length)), encoding) } -export function decodeLongPath (buf: Uint8ArrayList | Uint8Array, encoding?: SupportedEncodings) { +export function decodeLongPath (buf: Uint8ArrayList | Uint8Array, encoding?: SupportedEncodings): string { const list = isUint8ArrayList(buf) ? buf : new Uint8ArrayList(buf) return decodeStr(list, 0, buf.length, encoding) } -export function decodePax (buf: Uint8ArrayList | Uint8Array, encoding?: SupportedEncodings) { +export function decodePax (buf: Uint8ArrayList | Uint8Array, encoding?: SupportedEncodings): Record { let list = isUint8ArrayList(buf) ? buf : new Uint8ArrayList(buf) const result: Record = {} @@ -221,17 +221,17 @@ export function decode (buf: Uint8ArrayList | Uint8Array, filenameEncoding?: Sup } return { - name: name, - mode: mode, - uid: uid, - gid: gid, - size: size, + name, + mode, + uid, + gid, + size, mtime: new Date(1000 * (mtime ?? 
0)), - type: type, - linkname: linkname, - uname: uname, - gname: gname, - devmajor: devmajor, - devminor: devminor + type, + linkname, + uname, + gname, + devmajor, + devminor } } diff --git a/src/extract.ts b/src/extract.ts index d53106a..3681271 100644 --- a/src/extract.ts +++ b/src/extract.ts @@ -1,13 +1,13 @@ -import type { Source, Transform } from 'it-stream-types' import defer from 'p-defer' -import type { Uint8ArrayList } from 'uint8arraylist' -import type { SupportedEncodings } from 'uint8arrays/to-string' import * as Headers from './extract-headers.js' import { lteReader } from './lte-reader.js' -import type { LteReader } from './lte-reader.js' import type { TarEntry } from './index.js' +import type { LteReader } from './lte-reader.js' +import type { Source, Transform } from 'it-stream-types' +import type { Uint8ArrayList } from 'uint8arraylist' +import type { SupportedEncodings } from 'uint8arrays/to-string' -function getPadding (size: number) { +function getPadding (size: number): number { size &= 511 if (size !== 0) { @@ -17,7 +17,7 @@ function getPadding (size: number) { return 0 } -async function discardPadding (reader: LteReader, size: number) { +async function discardPadding (reader: LteReader, size: number): Promise { const overflow = getPadding(size) if (overflow > 0) { await reader.next(overflow) @@ -34,7 +34,7 @@ export interface Derp { body: Source } -export function extract (options: ExtractOptions = {}): Transform { +export function extract (options: ExtractOptions = {}): Transform, AsyncGenerator> { options.highWaterMark = options.highWaterMark ?? 1024 * 16 return async function * (source: Source) { // eslint-disable-line complexity diff --git a/src/index.ts b/src/index.ts index d94f40d..a50b6eb 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,3 +1,127 @@ +/** + * @packageDocumentation + * + * `it-tar` [packs](#packing) and [extracts](#extracts) tarballs. + * + * It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) + * + * @example Packing + * + * To create a pack stream use `tar.pack()` and pipe entries to it. + * + * ```TypeScript + * import fs from 'node:fs' + * import Tar from 'it-tar' + * import { pipe } from 'it-pipe' + * // @ts-expect-error no types + * import { sink } from 'stream-to-it' + * + * await pipe( + * [ + * // add a file called my-test.txt with the content "Hello World!" + * { + * header: { name: 'my-test.txt' }, + * body: 'Hello World!' + * }, + * // add a file called my-stream-test.txt from a stream + * { + * header: { name: 'my-stream-test.txt', size: 11 }, + * body: fs.createReadStream('./my-stream-test.txt') + * } + * ], + * Tar.pack(), + * // pipe the pack stream somewhere + * sink(process.stdout) + * ) + * ``` + * + * @example Extracting + * + * To extract a stream use `tar.extract()` and pipe a [source iterable](https://gist.github.com/alanshaw/591dc7dd54e4f99338a347ef568d6ee9#source-it) to it. 
+ * + * ```TypeScript + * import Tar from 'it-tar' + * import { pipe } from 'it-pipe' + * + * await pipe( + * [Uint8Array.from([0, 1, 2, 3, 4])], // An async iterable (for example a Node.js readable stream) + * Tar.extract(), + * async source => { + * for await (const entry of source) { + * // entry.header is the tar header (see below) + * // entry.body is the content body (might be an empty async iterable) + * for await (const data of entry.body) { + * // do something with the data + * } + * } + * // all entries read + * } + * ) + * ``` + * + * The tar archive is streamed sequentially, meaning you **must** drain each entry's body as you get them or else the main extract stream will receive backpressure and stop reading. + * + * Note that the body stream yields [`Uint8ArrayList`](https://npm.im/uint8arraylist) objects **not** `Uint8Arrays`s. + * + * @example Modifying existing tarballs + * + * Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball. + * + * ```TypeScript + * import Tar from 'it-tar' + * import { pipe } from 'it-pipe' + * // @ts-expect-error no types + * import { sink } from 'stream-to-it' + * import fs from 'node:fs' + * import path from 'node:path' + * + * await pipe( + * fs.createReadStream('./old-tarball.tar'), + * Tar.extract(), + * async function * (source) { + * for await (const entry of source) { + * // let's prefix all names with 'tmp' + * entry.header.name = path.join('tmp', entry.header.name) + * // write the new entry to the pack stream + * yield entry + * } + * }, + * Tar.pack(), + * sink(fs.createWriteStream('./new-tarball.tar')) + * ) + * ``` + * + * #### Headers + * + * The header object using in `entry` should contain the following properties. + * Most of these values can be found by stat'ing a file. + * + * ```js + * { + * name: 'path/to/this/entry.txt', + * size: 1314, // entry size. defaults to 0 + * mode: 0644, // entry mode. defaults to to 0755 for dirs and 0644 otherwise + * mtime: new Date(), // last modified date for entry. defaults to now. + * type: 'file', // type of entry. defaults to file. can be: + * // file | link | symlink | directory | block-device + * // character-device | fifo | contiguous-file + * linkname: 'path', // linked file name + * uid: 0, // uid of entry owner. defaults to 0 + * gid: 0, // gid of entry owner. defaults to 0 + * uname: 'maf', // uname of entry owner. defaults to null + * gname: 'staff', // gname of entry owner. defaults to null + * devmajor: 0, // device major version. defaults to 0 + * devminor: 0 // device minor version. 
defaults to 0 + * } + * ``` + * + * ## Related + * + * - [`it-pipe`](https://www.npmjs.com/package/it-pipe) Utility to "pipe" async iterables together + * - [`it-reader`](https://www.npmjs.com/package/it-reader) Read an exact number of bytes from a binary (async) iterable + * - [`stream-to-it`](https://www.npmjs.com/package/stream-to-it) Convert Node.js streams to streaming iterables + */ + import type { Source } from 'it-stream-types' export { extract } from './extract.js' @@ -29,7 +153,7 @@ export interface TarEntry { export interface TarImportCandidate { header: Partial & { name: string } - body?: Source | Uint8Array + body?: Source | Uint8Array | string } export interface ExtractOptions { diff --git a/src/lte-reader.ts b/src/lte-reader.ts index 1979c30..4ac3500 100644 --- a/src/lte-reader.ts +++ b/src/lte-reader.ts @@ -1,10 +1,10 @@ -import { isUint8ArrayList, Uint8ArrayList } from 'uint8arraylist' import { reader } from 'it-reader' +import { isUint8ArrayList, Uint8ArrayList } from 'uint8arraylist' import type { Source } from 'it-stream-types' export interface LteReader extends AsyncIterator { - nextLte: (bytes: number) => Promise> - return: () => Promise> + nextLte(bytes: number): Promise> + return(): Promise> } export function lteReader (source: Source): LteReader { @@ -23,7 +23,7 @@ export function lteReader (source: Source): LteReader { overflow = overflow.sublist(bytes) } else if (overflow.length < bytes) { const { value: nextValue, done } = await input.next(bytes - overflow.length) - if (done === true ?? nextValue == null) { + if (done === true || nextValue == null) { throw Object.assign( new Error(`stream ended before ${bytes - overflow.length} bytes became available`), { code: 'ERR_UNDER_READ' } @@ -44,7 +44,7 @@ export function lteReader (source: Source): LteReader { return result } - return await input.next(bytes) + return input.next(bytes) }, async nextLte (bytes: number): Promise> { const { done, value } = await lteReader.next() @@ -71,7 +71,7 @@ export function lteReader (source: Source): LteReader { return { done: false, value: list.sublist(0, bytes) } }, async return () { - return await input.return() + return input.return() } } diff --git a/src/pack-headers.ts b/src/pack-headers.ts index efa037d..2fce9a4 100644 --- a/src/pack-headers.ts +++ b/src/pack-headers.ts @@ -10,7 +10,7 @@ const MASK = parseInt('7777', 8) const MAGIC_OFFSET = 257 const VERSION_OFFSET = 263 -const toTypeflag = function (flag?: EntryType) { +const toTypeflag = function (flag?: EntryType): number { switch (flag) { case 'file': return 0 @@ -35,7 +35,7 @@ const toTypeflag = function (flag?: EntryType) { } } -const cksum = function (block: Uint8Array) { +const cksum = function (block: Uint8Array): number { let sum = 8 * 32 for (let i = 0; i < 148; i++) sum += block[i] for (let j = 156; j < 512; j++) sum += block[j] @@ -52,7 +52,7 @@ const encodeOct = function (val: number, n: number): Uint8Array { return uint8ArrayFromString(ZEROS.slice(0, n - str.length) + str + ' ') } -const addLength = function (str: string) { +const addLength = function (str: string): string { const len = uint8ArrayFromString(str).byteLength let digits = Math.floor(Math.log(len) / Math.log(10)) + 1 @@ -63,7 +63,7 @@ const addLength = function (str: string) { return `${len + digits}${str}` } -export function encodePax (opts: TarEntryHeader) { // TODO: encode more stuff in pax +export function encodePax (opts: TarEntryHeader): Uint8Array { // TODO: encode more stuff in pax let result = '' if (opts.name != null) { result += 
addLength(' path=' + opts.name + '\n') diff --git a/src/pack.ts b/src/pack.ts index 000e730..351124b 100644 --- a/src/pack.ts +++ b/src/pack.ts @@ -2,18 +2,18 @@ import isoConstants from 'iso-constants' import toBuffer from 'it-to-buffer' import { isUint8ArrayList, Uint8ArrayList } from 'uint8arraylist' -import type { TarEntryHeader, TarImportCandidate } from './index.js' -import * as Headers from './pack-headers.js' -import type { Source, Transform } from 'it-stream-types' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import * as Headers from './pack-headers.js' +import type { EntryType, TarEntryHeader, TarImportCandidate } from './index.js' +import type { Source, Transform } from 'it-stream-types' const { S_IFMT, S_IFBLK, S_IFCHR, S_IFDIR, S_IFIFO, S_IFLNK } = isoConstants const DMODE = parseInt('755', 8) const FMODE = parseInt('644', 8) const END_OF_TAR = new Uint8Array(1024) -function modeToType (mode: number = 0) { +function modeToType (mode: number = 0): EntryType { switch (mode & S_IFMT) { case S_IFBLK: return 'block-device' case S_IFCHR: return 'character-device' @@ -24,7 +24,7 @@ function modeToType (mode: number = 0) { } } -function getPadding (size: number) { +function getPadding (size: number): Uint8Array { size &= 511 if (size !== 0) { @@ -34,7 +34,7 @@ function getPadding (size: number) { return new Uint8Array(0) } -function encode (header: TarEntryHeader) { +function encode (header: TarEntryHeader): Uint8Array { if (header.pax == null) { const encoded = Headers.encode(header) @@ -45,7 +45,7 @@ function encode (header: TarEntryHeader) { return encodePax(header) } -function encodePax (header: TarEntryHeader) { +function encodePax (header: TarEntryHeader): Uint8Array { const paxHeader = Headers.encodePax(header) const newHeader: TarEntryHeader = { @@ -71,7 +71,7 @@ function encodePax (header: TarEntryHeader) { ).subarray() } -export function pack (): Transform { +export function pack (): Transform, AsyncGenerator> { return async function * (source: Source) { // eslint-disable-line complexity for await (let { header: partialHeader, body } of source) { const header: TarEntryHeader = { diff --git a/test/extract.spec.ts b/test/extract.spec.ts index 918b715..7f84fbf 100644 --- a/test/extract.spec.ts +++ b/test/extract.spec.ts @@ -1,10 +1,10 @@ -import { expect } from 'aegir/chai' -import * as Tar from '../src/index.js' -import * as Fixtures from './fixtures/index.js' import Fs from 'fs' +import { expect } from 'aegir/chai' import { pipe } from 'it-pipe' -import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import toBuffer from 'it-to-buffer' +import { toString as uint8ArrayToString } from 'uint8arrays/to-string' +import * as Tar from '../src/index.js' +import * as Fixtures from './fixtures/index.js' const clamp = (index: any, len: number, defaultValue: number): number => { if (typeof index !== 'number') { diff --git a/test/pack.spec.ts b/test/pack.spec.ts index e7cd0fa..4578604 100644 --- a/test/pack.spec.ts +++ b/test/pack.spec.ts @@ -1,10 +1,10 @@ -import { expect } from 'aegir/chai' -import * as Tar from '../src/index.js' -import * as Fixtures from './fixtures/index.js' import Fs from 'fs' +import { expect } from 'aegir/chai' import { pipe } from 'it-pipe' import toBuffer from 'it-to-buffer' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import * as Tar from '../src/index.js' +import * as Fixtures from './fixtures/index.js' 
describe('pack', () => { it('one-file', async () => { @@ -24,7 +24,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -60,7 +60,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -85,7 +85,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -122,7 +122,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -147,7 +147,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -171,7 +171,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) @@ -196,7 +196,7 @@ describe('pack', () => { const data = await pipe( entries, Tar.pack(), - async (source) => await toBuffer(source) + async (source) => toBuffer(source) ) expect(data.length & 511).to.equal(0) diff --git a/test/slow.spec.ts b/test/slow.spec.ts index fd8f7d2..9f8663e 100644 --- a/test/slow.spec.ts +++ b/test/slow.spec.ts @@ -1,11 +1,11 @@ +import Fs from 'fs' +import Zlib from 'zlib' import { expect } from 'aegir/chai' +import { pipe } from 'it-pipe' +// @ts-expect-error no types +import { transform } from 'stream-to-it' import * as Tar from '../src/index.js' import * as Fixtures from './fixtures/index.js' -import Zlib from 'zlib' -import Fs from 'fs' -// @ts-expect-error no types -import toIterable from 'stream-to-it' -import { pipe } from 'it-pipe' describe('huge', function () { this.timeout(120 * 1000) @@ -17,7 +17,7 @@ describe('huge', function () { await pipe( Fs.createReadStream(Fixtures.HUGE), - toIterable.transform(Zlib.createGunzip()), + transform(Zlib.createGunzip()), Tar.extract(), async source => { for await (const entry of source) { diff --git a/typedoc.json b/typedoc.json new file mode 100644 index 0000000..f599dc7 --- /dev/null +++ b/typedoc.json @@ -0,0 +1,5 @@ +{ + "entryPoints": [ + "./src/index.ts" + ] +}
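For reference, a minimal in-memory packing sketch against the API as changed in this patch — it mirrors the `import Tar from 'it-tar'` style used in the updated examples, exercises the `string` body now allowed on `TarImportCandidate`, and collects output with the `it-to-buffer` pattern used in `test/pack.spec.ts` (file names here are illustrative):

```TypeScript
import { pipe } from 'it-pipe'
import toBuffer from 'it-to-buffer'
import Tar from 'it-tar'

// pack two entries entirely in memory and collect the tarball bytes
const tarball = await pipe(
  [
    // string bodies are accepted by TarImportCandidate after this change
    { header: { name: 'hello.txt' }, body: 'Hello World!' },
    { header: { name: 'bytes.bin' }, body: Uint8Array.from([0, 1, 2]) }
  ],
  Tar.pack(),
  async source => toBuffer(source)
)

// tar output is always padded to a multiple of 512 bytes
console.info(tarball.byteLength)
```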
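And a companion extraction sketch (assuming a hypothetical `./my-tarball.tar` on disk): it drains each entry body before moving on, as the documentation added here requires, collecting the `Uint8ArrayList` chunks the body yields:

```TypeScript
import fs from 'node:fs'
import { pipe } from 'it-pipe'
import Tar from 'it-tar'
import { Uint8ArrayList } from 'uint8arraylist'

await pipe(
  fs.createReadStream('./my-tarball.tar'),
  Tar.extract(),
  async source => {
    for await (const entry of source) {
      // each body must be fully drained, otherwise the extract
      // stream receives backpressure and stops reading
      const contents = new Uint8ArrayList()

      for await (const chunk of entry.body) {
        // chunks are Uint8ArrayLists, not Uint8Arrays
        contents.append(chunk)
      }

      console.info(entry.header.name, contents.byteLength)
    }
  }
)
```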