Skip to content

Commit b84fc36

Browse files
committed
fix: default model & persistent conversation & selected text
1 parent d1e593b commit b84fc36

File tree

4 files changed

+29
-49
lines changed

4 files changed

+29
-49
lines changed

internal/api/chat/list_supported_models_v2.go

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -27,14 +27,6 @@ func (s *ChatServerV2) ListSupportedModels(
2727
var models []*chatv2.SupportedModel
2828
if strings.TrimSpace(settings.OpenAIAPIKey) == "" {
2929
models = []*chatv2.SupportedModel{
30-
{
31-
Name: "GPT-4o",
32-
Slug: "openai/gpt-4o",
33-
TotalContext: 128000,
34-
MaxOutput: 16400,
35-
InputPrice: 250,
36-
OutputPrice: 1000,
37-
},
3830
{
3931
Name: "GPT-4.1",
4032
Slug: "openai/gpt-4.1",
@@ -43,6 +35,14 @@ func (s *ChatServerV2) ListSupportedModels(
4335
InputPrice: 200,
4436
OutputPrice: 800,
4537
},
38+
{
39+
Name: "GPT-4o",
40+
Slug: "openai/gpt-4o",
41+
TotalContext: 128000,
42+
MaxOutput: 16400,
43+
InputPrice: 250,
44+
OutputPrice: 1000,
45+
},
4646
{
4747
Name: "GPT-4.1-mini",
4848
Slug: "openai/gpt-4.1-mini",
@@ -78,14 +78,6 @@ func (s *ChatServerV2) ListSupportedModels(
7878
}
7979
} else {
8080
models = []*chatv2.SupportedModel{
81-
{
82-
Name: "GPT-4o",
83-
Slug: openai.ChatModelGPT4o,
84-
TotalContext: 128000,
85-
MaxOutput: 16400,
86-
InputPrice: 250,
87-
OutputPrice: 1000,
88-
},
8981
{
9082
Name: "GPT-4.1",
9183
Slug: openai.ChatModelGPT4_1,
@@ -94,6 +86,14 @@ func (s *ChatServerV2) ListSupportedModels(
9486
InputPrice: 200,
9587
OutputPrice: 800,
9688
},
89+
{
90+
Name: "GPT-4o",
91+
Slug: openai.ChatModelGPT4o,
92+
TotalContext: 128000,
93+
MaxOutput: 16400,
94+
InputPrice: 250,
95+
OutputPrice: 1000,
96+
},
9797
{
9898
Name: "GPT-4.1-mini",
9999
Slug: openai.ChatModelGPT4_1Mini,

webapp/_webapp/src/stores/conversation/conversation-store.ts

Lines changed: 9 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import { create } from "zustand";
2-
import { persist, createJSONStorage } from "zustand/middleware";
32
import { Conversation, ConversationSchema } from "../../pkg/gen/apiclient/chat/v2/chat_pb";
43
import { fromJson } from "@bufbuild/protobuf";
54
import { useConversationUiStore } from "./conversation-ui-store";
@@ -13,36 +12,15 @@ interface ConversationStore {
1312
setIsStreaming: (isStreaming: boolean) => void;
1413
}
1514

16-
export const useConversationStore = create<ConversationStore>()(
17-
persist(
18-
(set, get) => ({
19-
currentConversation: newConversation(),
20-
setCurrentConversation: (conversation: Conversation) => set({ currentConversation: conversation }),
21-
updateCurrentConversation: (updater: (conversation: Conversation) => Conversation) =>
22-
set({ currentConversation: updater(get().currentConversation) }),
23-
startFromScratch: () => set({ currentConversation: newConversation() }),
24-
isStreaming: false,
25-
setIsStreaming: (isStreaming: boolean) => set({ isStreaming }),
26-
}),
27-
{
28-
name: "pd.conversation-storage",
29-
storage: createJSONStorage(() => localStorage, {
30-
replacer: (_key, value) => {
31-
if (typeof value === "bigint") {
32-
return value.toString() + "n";
33-
}
34-
return value;
35-
},
36-
reviver: (_key, value) => {
37-
if (typeof value === "string" && /^-?\d+n$/.test(value)) {
38-
return BigInt(value.slice(0, -1));
39-
}
40-
return value;
41-
},
42-
}),
43-
},
44-
),
45-
);
15+
export const useConversationStore = create<ConversationStore>((set, get) => ({
16+
currentConversation: newConversation(),
17+
setCurrentConversation: (conversation: Conversation) => set({ currentConversation: conversation }),
18+
updateCurrentConversation: (updater: (conversation: Conversation) => Conversation) =>
19+
set({ currentConversation: updater(get().currentConversation) }),
20+
startFromScratch: () => set({ currentConversation: newConversation() }),
21+
isStreaming: false,
22+
setIsStreaming: (isStreaming: boolean) => set({ isStreaming }),
23+
}));
4624

4725
export function newConversation(): Conversation {
4826
const modelSlug = useConversationUiStore.getState().lastUsedModelSlug;

webapp/_webapp/src/stores/conversation/conversation-ui-store.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ export const useConversationUiStore = create<ConversationUiStore>()(
103103
heightCollapseRequired: false,
104104
setHeightCollapseRequired: (heightCollapseRequired: boolean) => set({ heightCollapseRequired }),
105105

106-
lastUsedModelSlug: "gpt-4.1",
106+
lastUsedModelSlug: "openai/gpt-4.1",
107107
setLastUsedModelSlug: (lastUsedModelSlug: string) => set({ lastUsedModelSlug }),
108108

109109
resetPosition: () => {

webapp/_webapp/src/views/chat/footer/index.tsx

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ export function PromptInput() {
5959
const setIsStreaming = useConversationStore((s) => s.setIsStreaming);
6060

6161
const selectedText = useSelectionStore((s) => s.selectedText);
62+
const clearSelection = useSelectionStore((s) => s.clear);
6263

6364
const { sendMessageStream } = useSendMessageStream();
6465
const minimalistMode = useSettingStore((s) => s.minimalistMode);
@@ -74,10 +75,11 @@ export function PromptInput() {
7475
userId: user?.id,
7576
});
7677
setPrompt("");
78+
clearSelection();
7779
setIsStreaming(true);
7880
await sendMessageStream(prompt, selectedText ?? "");
7981
setIsStreaming(false);
80-
}, [sendMessageStream, prompt, selectedText, user?.id, setIsStreaming, setPrompt]);
82+
}, [sendMessageStream, prompt, selectedText, user?.id, setIsStreaming, setPrompt, clearSelection]);
8183
const handleKeyDown = useCallback(
8284
async (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
8385
// Check if IME composition is in progress to avoid submitting during Chinese input

0 commit comments

Comments (0)