7 changes: 7 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -64,6 +64,7 @@
"axios": "^0.30.0",
"cafe-utility": "^32.2.0",
"debug": "^4.4.1",
"hash-wasm": "^4.12.0",
"isomorphic-ws": "^4.0.1",
"semver": "^7.3.5",
"ws": "^8.7.0"
111 changes: 111 additions & 0 deletions src/bee.ts
@@ -7,6 +7,7 @@ import { areAllSequentialFeedsUpdateRetrievable } from './feed/retrievable'
import * as bytes from './modules/bytes'
import * as bzz from './modules/bzz'
import * as chunk from './modules/chunk'
import * as downloadStream from './modules/download-stream'
import * as balance from './modules/debug/balance'
import * as chequebook from './modules/debug/chequebook'
import * as connectivity from './modules/debug/connectivity'
@@ -310,6 +311,55 @@ export class Bee {
return bytes.downloadReadable(this.getRequestOptionsForCall(requestOptions), new ResourceLocator(resource), options)
}

/**
* Downloads raw data as a streaming ReadableStream by fetching chunks in parallel.
*
* This method is optimized for downloading large files as it:
* - Detects encryption automatically (64-byte encrypted references)
* - Fetches chunks in parallel with configurable concurrency
* - Streams data without loading entire file into memory
* - Supports progress callbacks
*
* Use this for downloading data uploaded with {@link uploadData} when you need:
* - Progress tracking
* - Better performance for large files
* - Lower memory usage
*
* @param resource Swarm reference (32 bytes plain, 64 bytes encrypted), Swarm CID, or ENS domain
* @param options Options including onDownloadProgress callback and concurrency
* @param requestOptions Options for making requests, such as timeouts, custom HTTP agents, headers, etc.
*
* @returns ReadableStream of file data
*
* @example
* ```typescript
* const stream = await bee.downloadDataStreaming(reference, {
* onDownloadProgress: ({ total, processed }) => {
* console.log(`Downloaded ${processed}/${total} chunks`)
* },
* concurrency: 32
* })
* ```
*
* @see [Bee docs - Upload and download](https://docs.ethswarm.org/docs/develop/access-the-swarm/upload-and-download)
*/
async downloadDataStreaming(
resource: Reference | Uint8Array | string,
options?: downloadStream.DownloadStreamOptions,
requestOptions?: BeeRequestOptions,
): Promise<ReadableStream<Uint8Array>> {
if (options) {
options = { ...prepareDownloadOptions(options), ...options }
}

return downloadStream.downloadDataStreaming(
this,
resource,
options,
this.getRequestOptionsForCall(requestOptions),
)
}
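
A minimal consumption sketch, assuming `bee` is a connected Bee instance and `reference` is a valid Swarm reference; only `downloadDataStreaming` itself is taken from the code above:

```typescript
// Drain the returned web ReadableStream into a single Uint8Array.
const stream = await bee.downloadDataStreaming(reference, { concurrency: 16 })
const reader = stream.getReader()
const parts: Uint8Array[] = []

for (;;) {
  const { done, value } = await reader.read()
  if (done) break
  parts.push(value)
}

// Concatenate the collected chunks into one buffer.
const total = parts.reduce((sum, part) => sum + part.length, 0)
const data = new Uint8Array(total)
let offset = 0
for (const part of parts) {
  data.set(part, offset)
  offset += part.length
}
```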

/**
* Uploads a chunk to the network.
*
@@ -553,6 +603,67 @@ export class Bee {
return bzz.downloadFileReadable(this.getRequestOptionsForCall(requestOptions), reference, path, options)
}

/**
* Downloads a file from a manifest as a streaming ReadableStream by fetching chunks in parallel.
*
* This method is optimized for downloading files from collections/manifests:
* - Downloads and parses the manifest
* - Looks up the file at the specified path
* - Detects encryption automatically (64-byte encrypted references)
* - Fetches chunks in parallel with configurable concurrency
* - Returns file metadata (content-type, filename) along with stream
* - Supports progress callbacks
*
* Use this for downloading files from collections when you need:
* - Progress tracking
* - Better performance for large files
* - Lower memory usage
*
* @param resource Swarm manifest reference (32 bytes plain, 64 bytes encrypted), Swarm CID, or ENS domain
* @param path Path within the manifest (e.g., 'index.html', 'images/logo.png')
* @param options Options including onDownloadProgress callback and concurrency
* @param requestOptions Options for making requests, such as timeouts, custom HTTP agents, headers, etc.
*
* @returns FileData with ReadableStream and metadata
*
* @example
* ```typescript
* const file = await bee.downloadFileStreaming(manifestRef, 'document.pdf', {
* onDownloadProgress: ({ total, processed }) => {
* console.log(`Progress: ${(processed/total*100).toFixed(1)}%`)
* },
* concurrency: 32
* })
*
* console.log('Content-Type:', file.contentType)
* console.log('Filename:', file.name)
*
* // Use the stream
* const reader = file.data.getReader()
* ```
*
* @see [Bee docs - Upload and download](https://docs.ethswarm.org/docs/develop/access-the-swarm/upload-and-download)
* @see [Bee API reference - `GET /bzz`](https://docs.ethswarm.org/api/#tag/BZZ/paths/~1bzz~1%7Breference%7D~1%7Bpath%7D/get)
*/
async downloadFileStreaming(
resource: Reference | Uint8Array | string,
path = '',
options?: downloadStream.DownloadStreamOptions,
requestOptions?: BeeRequestOptions,
): Promise<FileData<ReadableStream<Uint8Array>>> {
if (options) {
options = { ...prepareDownloadOptions(options), ...options }
}

return downloadStream.downloadFileStreaming(
this,
resource,
path,
options,
this.getRequestOptionsForCall(requestOptions),
)
}
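
A minimal sketch of writing the returned stream to disk in Node, assuming `bee` and `manifestRef` already exist and Node 17+ for `Readable.fromWeb`:

```typescript
import { createWriteStream } from 'fs'
import { Readable } from 'stream'
import { pipeline } from 'stream/promises'

// Download a file from a manifest and stream it straight to disk.
const file = await bee.downloadFileStreaming(manifestRef, 'document.pdf')

// Convert the web ReadableStream to a Node readable; the cast bridges the
// DOM and Node web-stream typings.
await pipeline(Readable.fromWeb(file.data as any), createWriteStream(file.name ?? 'document.pdf'))
```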

/**
* Upload collection of files to a Bee node
*
19 changes: 18 additions & 1 deletion src/chunk/bmt.ts
@@ -38,5 +38,22 @@ function calculateBmtRootHash(payload: Uint8Array): Uint8Array {
const input = new Uint8Array(MAX_CHUNK_PAYLOAD_SIZE)
input.set(payload)

return Binary.log2Reduce(Binary.partition(input, SEGMENT_SIZE), (a, b) => Binary.keccak256(Binary.concatBytes(a, b)))
// Build BMT by hashing pairs of segments level by level
let currentLevel = Binary.partition(input, SEGMENT_SIZE)

while (currentLevel.length > 1) {
const nextLevel: Uint8Array[] = []

for (let i = 0; i < currentLevel.length; i += 2) {
const left = currentLevel[i]
const right = currentLevel[i + 1]
const combined = Binary.concatBytes(left, right)
const hash = Binary.keccak256(combined)
nextLevel.push(hash)
}

currentLevel = nextLevel
}

return currentLevel[0]
}
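
A minimal sketch of the same pairwise reduction on four toy segments, assuming `Binary.keccak256` and `Binary.concatBytes` as used above; with a full 4096-byte chunk the loop runs seven levels (128 → 64 → 32 → 16 → 8 → 4 → 2 → 1 segments):

```typescript
import { Binary } from 'cafe-utility'

// Four 32-byte toy segments; a real chunk is padded to 128 of them.
const segments = [0, 1, 2, 3].map(i => new Uint8Array(32).fill(i))

// Level 1: hash adjacent pairs (s0|s1) and (s2|s3).
const left = Binary.keccak256(Binary.concatBytes(segments[0], segments[1]))
const right = Binary.keccak256(Binary.concatBytes(segments[2], segments[3]))

// Level 2: hash the two intermediate hashes to obtain the BMT root.
const root = Binary.keccak256(Binary.concatBytes(left, right))
```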
117 changes: 117 additions & 0 deletions src/chunk/encrypted-cac.ts
@@ -0,0 +1,117 @@
// Copyright 2024 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

import { Binary } from 'cafe-utility'
import { Bytes } from '../utils/bytes'
import { Reference, Span } from '../utils/typed-bytes'
import { calculateChunkAddress } from './bmt'
import { MIN_PAYLOAD_SIZE, MAX_PAYLOAD_SIZE } from './cac'
import { newChunkEncrypter, decryptChunkData, KEY_LENGTH, type Key } from './encryption'

const ENCODER = new TextEncoder()

/**
* Encrypted chunk interface
*
* The reference includes both the chunk address and the encryption key (64 bytes total)
*/
export interface EncryptedChunk {
readonly data: Uint8Array // encrypted span + encrypted data
readonly encryptionKey: Key // 32 bytes
span: Span // original (unencrypted) span
payload: Bytes // encrypted payload
address: Reference // BMT hash of encrypted data
reference: Reference // 64 bytes: address (32) + encryption key (32)
}

/**
* Creates an encrypted content addressed chunk
*
* Process:
* 1. Create chunk with span + payload
* 2. Encrypt the chunk data
* 3. Calculate BMT hash on encrypted data
* 4. Return reference = address + encryption key (64 bytes)
*
* @param payloadBytes the data to be stored in the chunk
* @param encryptionKey optional encryption key (if not provided, a random key will be generated)
*/
export function makeEncryptedContentAddressedChunk(
payloadBytes: Uint8Array | string,
encryptionKey?: Key,
): EncryptedChunk {
if (!(payloadBytes instanceof Uint8Array)) {
payloadBytes = ENCODER.encode(payloadBytes)
}

if (payloadBytes.length < MIN_PAYLOAD_SIZE || payloadBytes.length > MAX_PAYLOAD_SIZE) {
throw new RangeError(
`payload size ${payloadBytes.length} exceeds limits [${MIN_PAYLOAD_SIZE}, ${MAX_PAYLOAD_SIZE}]`,
)
}

// Create the original chunk data (span + payload)
const span = Span.fromBigInt(BigInt(payloadBytes.length))
const chunkData = Binary.concatBytes(span.toUint8Array(), payloadBytes)

// Encrypt the chunk
const encrypter = newChunkEncrypter()
const { key, encryptedSpan, encryptedData } = encrypter.encryptChunk(chunkData, encryptionKey)

// Concatenate encrypted span and data
const encryptedChunkData = Binary.concatBytes(encryptedSpan, encryptedData)

// Calculate BMT address on encrypted data
const address = calculateChunkAddress(encryptedChunkData)

// Create 64-byte reference: address (32 bytes) + encryption key (32 bytes)
const reference = new Reference(Binary.concatBytes(address.toUint8Array(), key))

return {
data: encryptedChunkData,
encryptionKey: key,
span,
payload: new Bytes(encryptedChunkData.slice(Span.LENGTH)),
address,
reference,
}
}

/**
* Decrypts an encrypted chunk given the encryption key
*
* @param encryptedChunkData The encrypted chunk data (span + payload)
* @param encryptionKey The 32-byte encryption key
*/
export function decryptEncryptedChunk(encryptedChunkData: Uint8Array, encryptionKey: Key): Uint8Array {
return decryptChunkData(encryptionKey, encryptedChunkData)
}

/**
* Extracts encryption key from a 64-byte encrypted reference
*
* @param reference 64-byte reference (address + key)
*/
export function extractEncryptionKey(reference: Reference): Key {
const refBytes = reference.toUint8Array()
if (refBytes.length !== 64) {
throw new Error(`Invalid encrypted reference length: ${refBytes.length}, expected 64`)
}

return refBytes.slice(32, 64)
}

/**
* Extracts the chunk address from a 64-byte encrypted reference
*
* @param reference 64-byte reference (address + key)
*/
export function extractChunkAddress(reference: Reference): Reference {
const refBytes = reference.toUint8Array()
if (refBytes.length !== 64) {
throw new Error(`Invalid encrypted reference length: ${refBytes.length}, expected 64`)
}

return new Reference(refBytes.slice(0, 32))
}
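
A minimal round-trip sketch using the helpers above; the import paths assume the sketch sits next to `encrypted-cac.ts`, and treating the result of `decryptEncryptedChunk` as span-prefixed chunk data (stripping `Span.LENGTH` bytes to recover the payload) is an assumption:

```typescript
import { Span } from '../utils/typed-bytes'
import {
  makeEncryptedContentAddressedChunk,
  decryptEncryptedChunk,
  extractChunkAddress,
  extractEncryptionKey,
} from './encrypted-cac'

// Encrypt a small payload; a random key is generated because none is passed.
const chunk = makeEncryptedContentAddressedChunk('hello swarm')

// The 64-byte reference carries both pieces needed to retrieve and decrypt.
const address = extractChunkAddress(chunk.reference) // BMT hash of the encrypted data
const key = extractEncryptionKey(chunk.reference) // 32-byte decryption key

// Decrypt; assumed to yield the original span-prefixed chunk data.
const decrypted = decryptEncryptedChunk(chunk.data, key)
const payload = decrypted.slice(Span.LENGTH) // strip the 8-byte span to recover 'hello swarm'
```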