From f996c5601334f494a905478754dd0c3dbc03e338 Mon Sep 17 00:00:00 2001 From: ff6347 Date: Tue, 3 May 2022 20:07:06 +0200 Subject: [PATCH] fix(HTTP): Remove max number of records Since we also can create a payload that only has two records but 1024*1024 bytes the number 1000 is totally random --- src/integrations/http.batch-insert.test.ts | 116 +++++++++++++++++++-- src/integrations/http.ts | 12 +-- src/lib/env.ts | 2 - 3 files changed, 114 insertions(+), 16 deletions(-) diff --git a/src/integrations/http.batch-insert.test.ts b/src/integrations/http.batch-insert.test.ts index 01a8d360..65a5006f 100644 --- a/src/integrations/http.batch-insert.test.ts +++ b/src/integrations/http.batch-insert.test.ts @@ -121,7 +121,7 @@ describe("tests for the http integration", () => { // end boilerplate }); - test(`should allow only ${recordsMaxLength} records`, async () => { + test(`should allow around 1 mb of payload`, async () => { // start boilerplate setup test const server = buildServer(buildServerOpts); const user = await signupUser(); @@ -135,8 +135,109 @@ describe("tests for the http integration", () => { }); // end boilerplate const records = await createRecordsPayload({ - amount: recordsMaxLength + 1, + amount: 1, }); + const oneMBInBytes = 1048576; // 1024 * 1024 + + let jsonString = JSON.stringify(records); + const bytesOfSkeleton = Buffer.byteLength(jsonString, "utf8"); + const bytesOfMeasurements = oneMBInBytes - bytesOfSkeleton; + + const measurements = []; + for (let i = 0; i < bytesOfMeasurements / 2 - 10; i++) { + measurements.push(1); + } + + records[0].measurements = measurements; + jsonString = JSON.stringify(records); + // console.log(bytesOfSkeleton, Buffer.byteLength(jsonString, "utf8")); + + const responseArray = await server.inject({ + method: "POST", + url: `/api/v${apiVersion}/sensors/${sensor.id}/records`, + payload: { records }, + headers: { + Authorization: `Bearer ${authToken}`, + }, + }); + expect(responseArray.statusCode).toBe(201); + + // start 
boilerplate delete user + await deleteUser(user.token); + // end boilerplate + }); + + test(`should reject large payload with 413`, async () => { + // start boilerplate setup test + const server = buildServer(buildServerOpts); + const user = await signupUser(); + + const authToken = await createAuthToken({ + server, + userToken: user.token, + }); + const sensor = await createSensor({ + user_id: user.id, + }); + // end boilerplate + const records = await createRecordsPayload({ + amount: 1, + }); + const oneMBInBytes = 1048576; // 1024 * 1024 + + let jsonString = JSON.stringify(records); + const bytesOfSkeleton = Buffer.byteLength(jsonString, "utf8"); + const bytesOfMeasurements = oneMBInBytes - bytesOfSkeleton; + + const measurements = []; + for (let i = 0; i < bytesOfMeasurements; i++) { + measurements.push(1); + } + + records[0].measurements = measurements; + jsonString = JSON.stringify(records); + // console.log(bytesOfSkeleton, Buffer.byteLength(jsonString, "utf8")); + + const responseArray = await server.inject({ + method: "POST", + url: `/api/v${apiVersion}/sensors/${sensor.id}/records`, + payload: { records }, + headers: { + Authorization: `Bearer ${authToken}`, + }, + }); + expect(responseArray.statusCode).toBe(413); + expect(responseArray.json()).toMatchInlineSnapshot(` + Object { + "code": "FST_ERR_CTP_BODY_TOO_LARGE", + "error": "Payload Too Large", + "message": "Request body is too large", + "statusCode": 413, + } + `); + // start boilerplate delete user + await deleteUser(user.token); + // end boilerplate + }); + test(`should reject records missing measurements`, async () => { + // start boilerplate setup test + const server = buildServer(buildServerOpts); + const user = await signupUser(); + + const authToken = await createAuthToken({ + server, + userToken: user.token, + }); + const sensor = await createSensor({ + user_id: user.id, + }); + // end boilerplate + const records = await createRecordsPayload({ + amount: 1000, + }); + // 
eslint-disable-next-line @typescript-eslint/ban-ts-comment + //@ts-ignore + records[5].measurements = undefined; const responseArray = await server.inject({ method: "POST", url: `/api/v${apiVersion}/sensors/${sensor.id}/records`, @@ -149,7 +250,7 @@ describe("tests for the http integration", () => { expect(responseArray.json()).toMatchInlineSnapshot(` Object { "error": "Bad Request", - "message": "body/records should NOT have more than 1000 items, body should match \\"then\\" schema", + "message": "body/records/5 should have required property 'measurements', body should match \\"then\\" schema", "statusCode": 400, } `); @@ -158,7 +259,7 @@ describe("tests for the http integration", () => { // end boilerplate }); - test(`should reject corrupt records`, async () => { + test(`should reject records missing recorded_at`, async () => { // start boilerplate setup test const server = buildServer(buildServerOpts); const user = await signupUser(); @@ -176,7 +277,10 @@ describe("tests for the http integration", () => { }); // eslint-disable-next-line @typescript-eslint/ban-ts-comment //@ts-ignore - records[5].measurements = undefined; + records[5].recorded_at = undefined; + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + //@ts-ignore + records[100].recorded_at = undefined; const responseArray = await server.inject({ method: "POST", url: `/api/v${apiVersion}/sensors/${sensor.id}/records`, @@ -189,7 +293,7 @@ describe("tests for the http integration", () => { expect(responseArray.json()).toMatchInlineSnapshot(` Object { "error": "Bad Request", - "message": "body/records/5 should have required property 'measurements', body should match \\"then\\" schema", + "message": "body/records/5 should have required property 'recorded_at', body/records/100 should have required property 'recorded_at', body should match \\"then\\" schema", "statusCode": 400, } `); diff --git a/src/integrations/http.ts b/src/integrations/http.ts index 4c54415a..97190354 100644 --- 
a/src/integrations/http.ts +++ b/src/integrations/http.ts @@ -5,7 +5,7 @@ import { definitions } from "@technologiestiftung/stadtpuls-supabase-definitions import { AuthToken } from "../common/jwt"; import S from "fluent-json-schema"; import config from "config"; -import { logLevel, recordsMaxLength } from "../lib/env"; +import { logLevel } from "../lib/env"; import { isValidDate } from "../lib/date-utils"; declare module "fastify" { @@ -55,13 +55,9 @@ const recordsSchema = S.object() .additionalProperties(true) .prop( "records", - S.array() - .items( - S.object() - .required(["measurements", "recorded_at"]) - .extend(recordSchema) - ) - .maxItems(recordsMaxLength) + S.array().items( + S.object().required(["measurements", "recorded_at"]).extend(recordSchema) + ) ); const postHTTPBodySchema = S.object() diff --git a/src/lib/env.ts b/src/lib/env.ts index a5031f65..0413603f 100644 --- a/src/lib/env.ts +++ b/src/lib/env.ts @@ -14,7 +14,6 @@ const stage = env.require("STAGE"); const logFlareApiKey = env.get("LOG_FLARE_API_KEY"); const logFlareSourceToken = env.get("LOG_FLARE_SOURCE_TOKEN"); -const recordsMaxLength = 1000; const logLevels = ["info", "error", "debug", "fatal", "warn", "trace"]; const supabaseMaxRows = parseInt(env.require("SUPABASE_MAX_ROWS"), 10); @@ -43,5 +42,4 @@ export { supabaseMaxRows, supabaseServiceRoleKey, supabaseUrl, - recordsMaxLength, };