Commit 6ebff9d

refactor: continue decoupling from chatgpt
1 parent 88ca564 commit 6ebff9d

12 files changed (+156 −108 lines)

src/api/chatgpt/getConversation.ts

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ export const getConversation = async ({
     )
     const data = await res.json()
 
-    return data as Conversation
+    return data as Conversation["chatgpt"]
   } catch (err) {
     console.error(err)
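The cast to Conversation["chatgpt"] (here and throughout the files below) suggests the shared Conversation type in ~utils/types has become an index of per-model raw conversation shapes. A minimal sketch of that assumed layout, not taken from this commit:

  import type { ChatGPTConversation } from "~utils/types/chatgpt"

  // Assumed per-model index: Conversation["chatgpt"] resolves to the raw
  // ChatGPT payload, Conversation["deepseek"] to whatever DeepSeek returns.
  export type SupportedModels = "chatgpt" | "deepseek"
  export type Conversation = {
    chatgpt: ChatGPTConversation
    deepseek: unknown // placeholder; the DeepSeek shape is not shown in this diff
  }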

src/api/chatgpt/getConversationTextdocs.ts

Lines changed: 4 additions & 5 deletions
@@ -1,16 +1,15 @@
+import type { Conversation, ConversationTextdocs } from "~utils/types"
 import type {
   CanvasHistoryResponse,
-  CanvasMessageMetadata,
-  Conversation,
-  ConversationTextdocs
-} from "~utils/types"
+  CanvasMessageMetadata
+} from "~utils/types/chatgpt"
 
 export const getConversationTextdocs = async ({
   conv,
   headers,
   includeVersions
 }: {
-  conv: Conversation
+  conv: Conversation["chatgpt"]
   headers: any
   includeVersions?: boolean
 }) => {

src/api/getConversationTextdocs.ts

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ type Params =
   | {
       model: "chatgpt"
       params: {
-        rawConversation: Conversation
+        rawConversation: Conversation["chatgpt"]
         headers: any
         includeVersions?: boolean
       }

src/background/functions/save.ts

Lines changed: 15 additions & 8 deletions
@@ -47,24 +47,31 @@ const save = async (
 
   await storage.set("saveStatus", "fetching" as SaveStatus)
 
-  const headers = convertHeaders(rawHeaders)
+  console.log({ rawHeaders })
+  const headers = convertHeaders(
+    rawHeaders.filter((h) => h.name.toLowerCase() != "cookie")
+  )
   const rawConversation = await getConversation({
     model: model,
     params: { convId, headers }
   })
 
   console.log({ rawConversation })
 
-  if (!rawConversation?.mapping) throw new Error("Conversation not found")
+  if (!rawConversation) throw new Error("Conversation not found")
 
-  const textDocs =
-    (await getConversationTextdocs({
-      model: model as any,
-      params: { rawConversation, headers, includeVersions: true }
-    })) ?? []
+  let textDocs: any[] = []
+  if (model == "chatgpt") {
+    textDocs =
+      (await getConversationTextdocs({
+        model: model as any,
+        params: { rawConversation, headers, includeVersions: true }
+      })) ?? []
+  }
 
   const conversation = parseConversation({
-    model: "chatgpt",
+    model: model,
+    // @ts-ignore
     params: { rawConversation, textDocs }
   })
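The notable change here is that the cookie header is stripped before convertHeaders runs, so only headers such as Authorization are replayed later. A hedged sketch of that filtering pulled into a standalone helper (dropCookieHeader is hypothetical; convertHeaders is the project's existing function, whose exact signature is assumed):

  // Hypothetical helper illustrating the cookie-stripping idea from the diff.
  type RawHeader = { name: string; value?: string }

  const dropCookieHeader = (headers: RawHeader[]): RawHeader[] =>
    headers.filter((h) => h.name.toLowerCase() !== "cookie")

  // Assumed usage, mirroring save.ts above:
  // const headers = convertHeaders(dropCookieHeader(rawHeaders))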

src/background/index.ts

Lines changed: 21 additions & 3 deletions
@@ -71,6 +71,11 @@ main()
 
 const trackedURLs = ["https://chatgpt.com/*", "https://chat.deepseek.com/*"]
 
+const deepseekUrls = [
+  "https://chat.deepseek.com/api/v0/chat/history_messages"
+  // "ttps://chat.deepseek.com/api/v0/chat_session/fetch_page?count=100"
+]
+
 chrome.webRequest.onSendHeaders.addListener(
   (res) => {
     if (
@@ -84,13 +89,26 @@ chrome.webRequest.onSendHeaders.addListener(
     if (
       // cacheHeaders ||
       !res.requestHeaders ||
-      !res.requestHeaders.some((h) => h.name === "Authorization")
+      !res.requestHeaders.some((h) => h.name.toLowerCase() === "authorization")
     )
       return
 
+    if (res.url.includes("chatgpt.com")) {
+      session.set(STORAGE_KEYS.cacheHeaders, res.requestHeaders)
+      storage.set(STORAGE_KEYS.hasCacheHeaders, true)
+      return
+    }
+
+    if (deepseekUrls.some((url) => res.url.includes(url))) {
+      // console.log({ url: res.url, headers: res.requestHeaders })
+      // console.log("Setting cache headers")
+
+      session.set(STORAGE_KEYS.cacheHeaders, res.requestHeaders)
+      storage.set(STORAGE_KEYS.hasCacheHeaders, true)
+      return
+    }
+
     // cacheHeaders = res.requestHeaders
-    session.set(STORAGE_KEYS.cacheHeaders, res.requestHeaders)
-    storage.set(STORAGE_KEYS.hasCacheHeaders, true)
   },
   { urls: trackedURLs, types: ["xmlhttprequest"] },
   ["requestHeaders", "extraHeaders"]

src/background/messages/getCurrentTab.ts

Lines changed: 10 additions & 9 deletions
@@ -4,6 +4,7 @@ import type { PlasmoMessaging } from "@plasmohq/messaging"
 import { Storage } from "@plasmohq/storage"
 
 import { STORAGE_KEYS } from "~utils/consts"
+import type { SupportedModels } from "~utils/types"
 
 const handler: PlasmoMessaging.MessageHandler = async (req, res) => {
   try {
@@ -16,18 +17,18 @@ const handler: PlasmoMessaging.MessageHandler = async (req, res) => {
 
     const tab = tabs[0]
 
-    if (tab.url) {
-      const tabURL = new URL(tab.url)
+    let model: SupportedModels | null = null
 
-      if (tabURL.hostname.includes("chatgpt.com")) {
-        await storage.set(STORAGE_KEYS.model, "chatgpt")
-      }
-      if (tabURL.hostname.includes("chat.deepseek.com")) {
-        await storage.set(STORAGE_KEYS.model, "deepseek")
-      }
+    if (tab.url?.includes("chatgpt.com")) {
+      model = "chatgpt"
+      await storage.set(STORAGE_KEYS.model, "chatgpt")
+    }
+    if (tab.url?.includes("chat.deepseek.com")) {
+      model = "deepseek"
+      await storage.set(STORAGE_KEYS.model, "deepseek")
     }
 
-    res.send({ tabId: tab.id, tabUrl: tab.url })
+    res.send({ tabId: tab.id, tabUrl: tab.url, model })
   } catch (err) {
     console.error(err)
     res.send({ err })
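The handler now derives the model from the tab URL with two substring checks. As a sketch, that mapping could live in a single helper so the popup and background agree on it (getModelFromUrl is hypothetical, not in this commit):

  import type { SupportedModels } from "~utils/types"

  // Hypothetical helper mapping a tab URL to a supported model.
  const getModelFromUrl = (url?: string): SupportedModels | null => {
    if (url?.includes("chatgpt.com")) return "chatgpt"
    if (url?.includes("chat.deepseek.com")) return "deepseek"
    return null
  }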

src/popup/IndexPopup.tsx

Lines changed: 16 additions & 10 deletions
@@ -3,13 +3,14 @@ import bannerEco1 from "data-base64:../../assets/banner-eco-1.png"
 
 import { useStorage } from "@plasmohq/storage/hook"
 
-import type {
-  AutosaveStatus,
-  Error,
-  PopupEnum,
-  SaveBehavior,
-  SaveStatus,
-  StoredDatabase
+import {
+  type AutosaveStatus,
+  type Error,
+  type PopupEnum,
+  type SaveBehavior,
+  type SaveStatus,
+  type StoredDatabase,
+  type SupportedModels
 } from "~utils/types"
 
 import "~styles.css"
@@ -38,11 +39,13 @@ import {
   i18n,
   updateChatConfig
 } from "~utils/functions"
+import { getConversationIdFromUrl } from "~utils/functions/llms"
 
 import ConflictPopup from "./ConflictPopup"
 
 function IndexPopup() {
   const [popup, setPopup] = useStorage<PopupEnum>(STORAGE_KEYS.popup, "index")
+  const [model] = useStorage<SupportedModels | undefined>(STORAGE_KEYS.model)
   const [selectedDB, setSelectedDB] = useStorage<number>(
     STORAGE_KEYS.selectedDB,
     0
@@ -83,9 +86,12 @@ function IndexPopup() {
   const savePercent = useSavePercentage(saveStatus, 5000)
 
   useEffect(() => {
-    sendToBackground({ name: "getCurrentTab" }).then(({ tabUrl }) => {
-      const id = tabUrl?.split("/c/").pop()
-      setChatID(id?.length != 36 ? null : id)
+    console.log({ cacheHeaders, chatID })
+  }, [cacheHeaders, chatID])
+
+  useEffect(() => {
+    sendToBackground({ name: "getCurrentTab" }).then(({ tabUrl, model }) => {
+      setChatID(getConversationIdFromUrl(model, tabUrl))
     })
   }, [])
 

src/popup/index.tsx

Lines changed: 2 additions & 5 deletions
@@ -66,11 +66,8 @@ export default function Wrapper() {
       active: true,
       currentWindow: true
     })
-    if (
-      !tabs[0].url?.match(
-        /^((https:\/\/chat.openai.com)|(https:\/\/chatgpt.com)).*/
-      )
-    ) {
+    const urlObj = new URL(tabs[0].url!)
+    if (!urlObj.hostname.match(/((chatgpt.com)|(chat.deepseek.com))/)) {
       if (!popup || popup === "index" || popup === "save")
         await setPopup("wrongpage")
     } else if (popup === "wrongpage") {
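The new hostname check uses a regex with unescaped dots, so each "." matches any character (a hostname containing "chatgpt-com", for instance, would pass). A hedged alternative sketch using an explicit host allowlist instead of the regex:

  // Sketch: exact-host allowlist instead of the loose regex above.
  const supportedHosts = ["chatgpt.com", "chat.deepseek.com"]
  const onSupportedSite = supportedHosts.some(
    (h) => urlObj.hostname === h || urlObj.hostname.endsWith("." + h)
  )
  // if (!onSupportedSite) { ... setPopup("wrongpage") ... }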

src/utils/functions/chatgpt.ts

Lines changed: 6 additions & 10 deletions
@@ -1,12 +1,8 @@
-import type {
-  CanvasMessageMetadata,
-  Conversation,
-  ConversationTextdocs,
-  Message
-} from "~utils/types"
+import type { Conversation, ConversationTextdocs, Message } from "~utils/types"
+import type { CanvasMessageMetadata } from "~utils/types/chatgpt"
 
 export const parseConversation = (
-  rawConv: Conversation,
+  rawConv: Conversation["chatgpt"],
   textDocs: ConversationTextdocs
 ) => {
   const { conversation_id: id, title, mapping } = rawConv
@@ -45,7 +41,7 @@ export const parseConversation = (
     },
     { prev: "system", rawPrompts: [] } as {
       prev: string
-      rawPrompts: Message[]
+      rawPrompts: Message["chatgpt"][]
     }
   )
 
@@ -66,8 +62,8 @@ export const parseConversation = (
 }
 
 export const flattenMessage = (
-  msg: Message,
-  mapping: Conversation["mapping"],
+  msg: Message["chatgpt"],
+  mapping: Conversation["chatgpt"]["mapping"],
   flattenedMessage: string[],
   textDocs: ConversationTextdocs
 ) => {

src/utils/functions/llms.ts

Lines changed: 24 additions & 9 deletions
@@ -1,4 +1,5 @@
 import { parseConversation as chatgptParseConversation } from "~utils/functions/chatgpt"
+import { parseConversation as deepseekParseConversation } from "~utils/functions/deepseek"
 import type {
   Conversation,
   ConversationTextdocs,
@@ -12,30 +13,44 @@ export const parseConversation = ({
   switch (model) {
     case "chatgpt":
       return chatgptParseConversation(params.rawConversation, params.textDocs)
+    case "deepseek":
+      return deepseekParseConversation(params.rawConversation)
     default:
       throw new Error("Model not supported")
   }
 }
 
-type ParseConversationParams = {
-  model: "chatgpt"
-  params: {
-    rawConversation: Conversation
-    textDocs: ConversationTextdocs
-  }
-}
+type ParseConversationParams =
+  | {
+      model: "chatgpt"
+      params: {
+        rawConversation: Conversation["chatgpt"]
+        textDocs: ConversationTextdocs
+      }
+    }
+  | {
+      model: "deepseek"
+      params: {
+        rawConversation: Conversation["deepseek"]
+        textDocs: null
+      }
+    }
 
 export const getConversationIdFromUrl = (
   model: SupportedModels,
   url: string
 ) => {
   const urlObj = new URL(url)
 
+  let id: string | undefined
+
   switch (model) {
     case "chatgpt":
-      return urlObj.pathname.split("/").pop()
+      id = urlObj.pathname.split("/").pop()
+      return id?.length != 36 ? null : id
    case "deepseek":
-      return urlObj.pathname.split("/").pop()
+      id = urlObj.pathname.split("/").pop()
+      return id?.length != 36 ? null : id
     default:
       throw new Error("Model not supported")
   }
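Both cases of getConversationIdFromUrl now do the same thing: take the last path segment and keep it only when it is 36 characters long (a UUID-shaped id). A behaviour-equivalent sketch of the shared form, not part of this commit:

  export const getConversationIdFromUrl = (
    model: SupportedModels,
    url: string
  ) => {
    if (model !== "chatgpt" && model !== "deepseek")
      throw new Error("Model not supported")
    // Both supported models keep UUID-shaped conversation ids in the last path segment.
    const id = new URL(url).pathname.split("/").pop()
    return id?.length !== 36 ? null : id
  }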

src/utils/types/chatgpt.ts

Lines changed: 50 additions & 0 deletions
@@ -0,0 +1,50 @@
+export type ChatGPTConversation = {
+  title: string
+  conversation_id: string
+  mapping: Record<string, ChatGPTMessage>
+}
+
+export type ChatGPTMessage = {
+  message?: {
+    create_time: number
+    author?: {
+      role: string
+      name: string
+    }
+    content: {
+      content_type: string
+      parts?: any[]
+      text?: string
+    }
+    recipient: string
+    metadata: Record<string, any>
+    end_turn?: true
+  }
+  children?: string[]
+}
+
+export type CanvasMessageMetadata = {
+  textdoc_id: string
+  textdoc_type: string
+  version: number
+  title: string
+}
+
+export type CanvasHistoryResponse = {
+  previous_doc_states: {
+    id: string
+    version: number
+    title: string
+    textdoc_type: string
+    content: string
+    comments: []
+    updated_at: string
+  }[]
+}
+
+export type CreateCanvasData = {
+  name: string
+  title?: string // futureproofing
+  type: string
+  content: string
+}
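These types describe the raw ChatGPT payload: mapping is a flat record of message nodes linked by children ids. A small usage sketch showing how a consumer might walk it to collect plain-text parts (collectTexts is illustrative only, not part of the commit):

  import type { ChatGPTConversation } from "~utils/types/chatgpt"

  // Illustrative traversal: gather every string part from every message node.
  const collectTexts = (conv: ChatGPTConversation): string[] =>
    Object.values(conv.mapping).flatMap(
      (node) =>
        node.message?.content.parts?.filter(
          (p): p is string => typeof p === "string"
        ) ?? []
    )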
