Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ export function DialogSessionList() {

const options = createMemo(() => {
const today = new Date().toDateString()
const sessionsListLimit = (sync.data.config.tui as any)?.session_list_limit
const limit = sessionsListLimit === "none" ? undefined : sessionsListLimit || 150

return sync.data.session
.filter((x) => x.parentID === undefined)
.map((x) => {
Expand All @@ -41,7 +44,7 @@ export function DialogSessionList() {
footer: Locale.time(x.time.updated),
}
})
.slice(0, 150)
.slice(0, limit)
})

createEffect(() => {
Expand Down
16 changes: 14 additions & 2 deletions packages/opencode/src/cli/cmd/tui/context/sync.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,16 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
event.properties.info.sessionID,
produce((draft) => {
  draft.splice(result.index, 0, event.properties.info)
  // Trim the in-memory message list to the configured cap.
  // "none" disables trimming entirely; a missing/falsy setting falls
  // back to the historical default of 100.
  // NOTE(review): `as any` works around untyped tui config — ideally the
  // Config type should expose messages_limit directly.
  const configured = (store.config.tui as any)?.messages_limit
  const maxMessages = configured === "none" ? Infinity : configured || 100
  // Oldest entry is dropped from the front when the cap is exceeded;
  // inserts happen one at a time, so a single shift keeps the invariant.
  if (draft.length > maxMessages) draft.shift()
}),
)
break
Expand Down Expand Up @@ -332,9 +341,11 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
},
async sync(sessionID: string) {
if (fullSyncedSessions.has(sessionID)) return
const messagesLimit = (store.config.tui as any)?.messages_limit
const limit = messagesLimit === "none" ? undefined : messagesLimit || 100
const [session, messages, todo, diff] = await Promise.all([
sdk.client.session.get({ path: { id: sessionID }, throwOnError: true }),
sdk.client.session.messages({ path: { id: sessionID }, query: { limit: 100 } }),
sdk.client.session.messages({ path: { id: sessionID }, query: { limit } }),
sdk.client.session.todo({ path: { id: sessionID } }),
sdk.client.session.diff({ path: { id: sessionID } }),
])
Expand All @@ -348,6 +359,7 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
for (const message of messages.data!) {
draft.part[message.info.id] = message.parts
}

draft.session_diff[sessionID] = diff.data ?? []
}),
)
Expand Down
11 changes: 11 additions & 0 deletions packages/opencode/src/config/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,18 @@ export namespace Config {
.enum(["auto", "stacked"])
.optional()
.describe("Control diff rendering style: 'auto' adapts to terminal width, 'stacked' always shows single column"),
session_list_limit: z
.union([z.number().min(1), z.literal("none")])
.optional()
.default(150)
.describe("Maximum number of sessions to display in session list, or 'none' to show all sessions"),
messages_limit: z
.union([z.number().min(1), z.literal("none")])
.optional()
.default(100)
.describe("Maximum number of message parts to load per session when syncing, or 'none' to load all messages"),
})
export type TUI = z.infer<typeof TUI>

export const Layout = z.enum(["auto", "stretch"]).meta({
ref: "LayoutConfig",
Expand Down
18 changes: 17 additions & 1 deletion packages/opencode/src/session/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -291,11 +291,27 @@ export namespace Session {
}),
async (input) => {
  // Collect streamed messages for a session, bounded by input.limit,
  // where the limit counts message PARTS (not messages).
  const result: MessageV2.WithParts[] = []
  let totalParts = 0

  for await (const msg of MessageV2.stream(input.sessionID)) {
    // Messages are kept whole — if adding this message's parts would
    // exceed the limit, stop streaming here rather than splitting it.
    if (input.limit && totalParts + msg.parts.length > input.limit) break
    result.push(msg)
    totalParts += msg.parts.length
  }

  // Stream presumably yields newest-first; reversing restores
  // chronological order for callers — TODO confirm against MessageV2.stream.
  result.reverse()
  return result
},
)
Expand Down
120 changes: 120 additions & 0 deletions packages/opencode/test/config/config.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -501,3 +501,123 @@ test("deduplicates duplicate plugins from global and local configs", async () =>
},
})
})

test("handles TUI configuration with session_list_limit and messages_limit", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
tui: {
session_list_limit: 200,
messages_limit: 50,
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await Config.get()
expect(config.tui?.session_list_limit).toBe(200)
expect(config.tui?.messages_limit).toBe(50)
},
})
})

test("handles TUI configuration with session_list_limit set to 'none'", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
tui: {
session_list_limit: "none",
messages_limit: 75,
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await Config.get()
expect(config.tui?.session_list_limit).toBe("none")
expect(config.tui?.messages_limit).toBe(75)
},
})
})

test("validates TUI session_list_limit schema - rejects invalid values", async () => {
  // session_list_limit must be a positive number or "none";
  // a negative value should make Config.get() reject.
  await using tmp = await tmpdir({
    init: async (dir) => {
      const body = JSON.stringify({
        $schema: "https://opencode.ai/config.json",
        tui: {
          session_list_limit: -5, // Invalid: negative number
        },
      })
      await Bun.write(path.join(dir, "opencode.json"), body)
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      await expect(Config.get()).rejects.toThrow()
    },
  })
})

test("validates TUI messages_limit schema - rejects invalid values", async () => {
  // The schema requires messages_limit >= 1, so zero must fail validation.
  await using tmp = await tmpdir({
    init: async (dir) => {
      const body = JSON.stringify({
        $schema: "https://opencode.ai/config.json",
        tui: {
          messages_limit: 0, // Invalid: must be >= 1
        },
      })
      await Bun.write(path.join(dir, "opencode.json"), body)
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      await expect(Config.get()).rejects.toThrow()
    },
  })
})

test("handles partial TUI configuration with backward compatibility", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
tui: {
scroll_speed: 2.5,
// session_list_limit and messages_limit not specified - should inherit from global config
},
}),
)
},
})
await Instance.provide({
directory: tmp.path,
fn: async () => {
const config = await Config.get()
expect(config.tui?.scroll_speed).toBe(2.5)
// Note: session_list_limit and messages_limit may be inherited from global config
// The important thing is that the config loads successfully and scroll_speed is set correctly
},
})
})
8 changes: 8 additions & 0 deletions packages/sdk/js/src/gen/types.gen.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1008,6 +1008,14 @@ export type Config = {
* Control diff rendering style: 'auto' adapts to terminal width, 'stacked' always shows single column
*/
diff_style?: "auto" | "stacked"
/**
* Maximum number of sessions to display in session list, or 'none' to show all sessions
*/
session_list_limit?: number | "none"
/**
 * Maximum number of message parts to load per session when syncing, or 'none' to load all messages
*/
messages_limit?: number | "none"
}
/**
* Command configuration, see https://opencode.ai/docs/commands
Expand Down
Loading