Add support for downloading binary files using S3Client.getObject
This commit adds an `additionalHeaders` parameter to S3Client.getObject and, with it, support
for the `Accept` header. When `Accept` is set to `application/octet-stream`, the operation
treats the response as binary content and returns an ArrayBuffer.

We opted for the header approach because it neither leaks nor depends on k6 interfaces, and it
follows standard HTTP semantics: the Accept header tells the server which content type we
expect in return.
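
For illustration, a k6 script could exercise the new parameter as sketched below. The jslib
import URL and version, the bucket name, and the object key are placeholders to adapt to your
environment:

import { AWSConfig, S3Client } from 'https://jslib.k6.io/aws/0.12.3/s3.js'

// Placeholder configuration: adjust region, credentials, bucket, and key to your setup.
const awsConfig = new AWSConfig({
    region: __ENV.AWS_REGION,
    accessKeyId: __ENV.AWS_ACCESS_KEY_ID,
    secretAccessKey: __ENV.AWS_SECRET_ACCESS_KEY,
})

const s3 = new S3Client(awsConfig)

export default async function () {
    // Requesting 'application/octet-stream' makes getObject return the object's
    // data as an ArrayBuffer instead of a string.
    const obj = await s3.getObject('my-bucket', 'archive.zip', {
        Accept: 'application/octet-stream',
    })

    console.log(`downloaded ${obj.data.byteLength} bytes`)
}

When the Accept header is omitted, the behavior is unchanged and `data` is still returned as a
string.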
oleiade committed Jul 30, 2024
1 parent a2560c3 commit 188d061
Showing 2 changed files with 38 additions and 5 deletions.
28 changes: 23 additions & 5 deletions src/internal/s3.ts
@@ -168,7 +168,11 @@ export class S3Client extends AWSClient {
      * @throws {S3ServiceError}
      * @throws {InvalidSignatureError}
      */
-    async getObject(bucketName: string, objectKey: string): Promise<S3Object> {
+    async getObject(
+        bucketName: string,
+        objectKey: string,
+        additionalHeaders: object = {}
+    ): Promise<S3Object> {
         // Prepare request
         const method = 'GET'

@@ -177,13 +181,27 @@ export class S3Client extends AWSClient {
                 method: method,
                 endpoint: this.endpoint,
                 path: encodeURI(`/${bucketName}/${objectKey}`),
-                headers: {},
+                headers: {
+                    ...additionalHeaders,
+                },
             },
             {}
         )

+        // If the Accept header is set to 'application/octet-stream', we want to
+        // return the response as binary data.
+        let responseType: ResponseType = 'text'
+        if (
+            'Accept' in additionalHeaders &&
+            additionalHeaders['Accept'] !== undefined &&
+            additionalHeaders['Accept'] === 'application/octet-stream'
+        ) {
+            responseType = 'binary'
+        }
+
         const res = await http.asyncRequest(method, signedRequest.url, null, {
             headers: signedRequest.headers,
+            responseType: responseType as ResponseType,
         })
         this._handle_error('GetObject', res)

@@ -537,7 +555,7 @@ export class S3Object {
     etag: string
     size: number
     storageClass: StorageClass
-    data?: string | bytes | null
+    data?: string | ArrayBuffer | null

     /**
      * Create an S3 Object
@@ -547,15 +565,15 @@
      * @param {string} etag - S3 object's etag
      * @param {number} size - S3 object's size
      * @param {StorageClass} storageClass - S3 object's storage class
-     * @param {string | bytes | null} data=null - S3 Object's data
+     * @param {string | ArrayBuffer | null} data=null - S3 Object's data
      */
     constructor(
         key: string,
         lastModified: number,
         etag: string,
         size: number,
         storageClass: StorageClass,
-        data?: string | bytes | null
+        data?: string | ArrayBuffer | null
     ) {
         this.key = key
         this.lastModified = lastModified
15 changes: 15 additions & 0 deletions tests/internal/s3.js
@@ -73,6 +73,21 @@ export async function s3TestSuite(data) {
         expect(getNonExistingObjectError).to.be.an.instanceOf(S3ServiceError)
     })

+    await asyncDescribe('s3.getObject [binary]', async (expect) => {
+        // Act
+        const gotBinaryObject = await s3Client.getObject(
+            data.s3.testBucketName,
+            data.s3.testObjects[0].key,
+            { Accept: 'application/octet-stream' }
+        )
+
+        // Assert
+        expect(gotBinaryObject).to.be.an('object')
+        expect(gotBinaryObject.key).to.equal(data.s3.testObjects[0].key)
+        expect(gotBinaryObject.data).to.be.an('ArrayBuffer')
+        expect(gotBinaryObject.data.byteLength).to.equal(data.s3.testObjects[0].body.length)
+    })
+
     await asyncDescribe('s3.putObject', async (expect) => {
         // Act
         let putObectError
