Skip to content

Commit 4168581

Browse files
jordanhunt22 (Convex, Inc.)
authored and
Convex, Inc.
committed
Increase user read + write limits (#35974)
GitOrigin-RevId: b96639fca06e5050d16a5623909118f708773fb4
1 parent 461c5c7 commit 4168581

File tree

3 files changed

+13
-13
lines changed

3 files changed

+13
-13
lines changed

crates/common/src/knobs.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -210,11 +210,11 @@ pub static HTTP_SERVER_MAX_CONCURRENT_REQUESTS: LazyLock<usize> =
210210

211211
/// Max number of user writes in a transaction
212212
pub static TRANSACTION_MAX_NUM_USER_WRITES: LazyLock<usize> =
213-
LazyLock::new(|| env_config("TRANSACTION_MAX_NUM_USER_WRITES", 8192));
213+
LazyLock::new(|| env_config("TRANSACTION_MAX_NUM_USER_WRITES", 16000));
214214

215215
/// Max size of user writes in a transaction, in bytes
216216
pub static TRANSACTION_MAX_USER_WRITE_SIZE_BYTES: LazyLock<usize> = LazyLock::new(|| {
217-
env_config("TRANSACTION_MAX_USER_WRITE_SIZE_BYTES", 1 << 23) // 8 MiB
217+
env_config("TRANSACTION_MAX_USER_WRITE_SIZE_BYTES", 1 << 24) // 16 MiB
218218
});
219219

220220
/// SnapshotManager maintains a bounded time range of versions,
@@ -340,11 +340,11 @@ pub static MAX_REACTOR_CALL_DEPTH: LazyLock<usize> =
340340

341341
/// Number of rows that can be read in a transaction.
342342
pub static TRANSACTION_MAX_READ_SIZE_ROWS: LazyLock<usize> =
343-
LazyLock::new(|| env_config("TRANSACTION_MAX_READ_SIZE_ROWS", 16384));
343+
LazyLock::new(|| env_config("TRANSACTION_MAX_READ_SIZE_ROWS", 32000));
344344

345345
/// Number of bytes that can be read in a transaction.
346346
pub static TRANSACTION_MAX_READ_SIZE_BYTES: LazyLock<usize> = LazyLock::new(|| {
347-
env_config("TRANSACTION_MAX_READ_SIZE_BYTES", 1 << 23) // 8 MiB
347+
env_config("TRANSACTION_MAX_READ_SIZE_BYTES", 1 << 24) // 16 MiB
348348
});
349349

350350
/// Maximum number of intervals that can be read in a transaction.

crates/isolate/src/tests/adversarial.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -343,9 +343,9 @@ async fn test_iterate_consumed(rt: TestRuntime) -> anyhow::Result<()> {
343343
#[convex_macro::test_runtime]
344344
async fn test_reads_too_large(rt: TestRuntime) -> anyhow::Result<()> {
345345
let t = UdfTest::default(rt).await?;
346-
// 16 documents per write * 256KiB per document * 5 writes = 20 MiB, which is
346+
// 32 documents per write * 256KiB per document * 5 writes = 40 MiB, which is
347347
// higher than the limit on reads.
348-
let count_per_write = 16.0;
348+
let count_per_write = 32.0;
349349
let mut ids: Vec<ConvexValue> = vec![];
350350
for _ in 0..5 {
351351
let more_ids = t
@@ -372,9 +372,9 @@ async fn test_reads_too_large(rt: TestRuntime) -> anyhow::Result<()> {
372372
#[convex_macro::test_runtime]
373373
async fn test_reads_large(rt: TestRuntime) -> anyhow::Result<()> {
374374
let t = UdfTest::default(rt).await?;
375-
// 6 documents per write * 256KiB per document * 5 writes = 7 MiB, which is
375+
// 12 documents per write * 256KiB per document * 5 writes = 15 MiB, which is
376376
// close to the limit on reads.
377-
let count_per_write = 6.0;
377+
let count_per_write = 12.0;
378378
let mut ids: Vec<ConvexValue> = vec![];
379379
for _ in 0..5 {
380380
let more_ids = t
@@ -413,7 +413,7 @@ async fn test_writes_too_big(rt: TestRuntime) -> anyhow::Result<()> {
413413
#[convex_macro::test_runtime]
414414
async fn test_writes_big(rt: TestRuntime) -> anyhow::Result<()> {
415415
let t = UdfTest::default(rt).await?;
416-
let count = 30.0;
416+
let count = 60.0;
417417
let mut log_lines = t
418418
.mutation_log_lines("adversarial:bigWrite", assert_obj!("count" => count))
419419
.await?;

npm-packages/udf-tests/convex/adversarial.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -77,15 +77,15 @@ export const queryLeak = query(async ({ db }) => {
7777
});
7878

7979
export const queryATon = query(async ({ db }) => {
80-
for (let i = 0; i < 15000; i++) {
80+
for (let i = 0; i < 30000; i++) {
8181
for await (const _row of db.query("test")) {
8282
break;
8383
}
8484
}
8585
});
8686

8787
export const queryTooManyTimes = query(async ({ db }) => {
88-
for (let i = 0; i < 5000; i++) {
88+
for (let i = 0; i < 10000; i++) {
8989
await db
9090
.query("test")
9191
.withIndex("by_hello", (q) => q.eq("hello", i))
@@ -249,13 +249,13 @@ export const oom = query(async ({ db }) => {
249249
});
250250

251251
export const tooManyWrites = mutation(async ({ db }) => {
252-
for (let i = 0; i < 8193; i++) {
252+
for (let i = 0; i < 16002; i++) {
253253
await db.insert("test", { counter: i });
254254
}
255255
});
256256

257257
export const manyWrites = mutation(async ({ db }) => {
258-
for (let i = 0; i < 8093; i++) {
258+
for (let i = 0; i < 15990; i++) {
259259
await db.insert("test", { counter: i });
260260
}
261261
});

0 commit comments

Comments (0)