diff --git a/next.config.mjs b/next.config.mjs index dfafb1d2b954..7e28c6179964 100644 --- a/next.config.mjs +++ b/next.config.mjs @@ -6,6 +6,7 @@ import ReactComponentName from 'react-scan/react-component-name/webpack'; const isProd = process.env.NODE_ENV === 'production'; const buildWithDocker = process.env.DOCKER === 'true'; const enableReactScan = !!process.env.REACT_SCAN_MONITOR_API_KEY; +const isUsePglite = process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite'; // if you need to proxy the api endpoint to remote server const API_PROXY_ENDPOINT = process.env.API_PROXY_ENDPOINT || ''; @@ -26,6 +27,7 @@ const nextConfig = { 'gpt-tokenizer', 'chroma-js', ], + serverComponentsExternalPackages: ['@electric-sql/pglite'], webVitalsAttribution: ['CLS', 'LCP'], }, @@ -180,7 +182,8 @@ const nextConfig = { layers: true, }; - if (enableReactScan) { + // Enabling this plugin rewrites pglite's fs bundling and breaks it, so skip it when pglite is in use + if (enableReactScan && !isUsePglite) { config.plugins.push(ReactComponentName({})); } diff --git a/package.json b/package.json index 17d7363b452d..e5475f3dbe06 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,8 @@ "build-sitemap": "tsx ./scripts/buildSitemapIndex/index.ts", "build:analyze": "ANALYZE=true next build", "build:docker": "DOCKER=true next build && npm run build-sitemap", - "db:generate": "drizzle-kit generate", + "db:generate": "drizzle-kit generate && npm run db:generate-client", + "db:generate-client": "tsx ./scripts/migrateClientDB/compile-migrations.ts", "db:migrate": "MIGRATION_DB=1 tsx ./scripts/migrateServerDB/index.ts", "db:push": "drizzle-kit push", "db:push-test": "NODE_ENV=test drizzle-kit push", @@ -117,6 +118,7 @@ "@clerk/themes": "^2.1.37", "@codesandbox/sandpack-react": "^2.19.9", "@cyntler/react-doc-viewer": "^1.17.0", + "@electric-sql/pglite": "0.2.13", "@google/generative-ai": "^0.21.0", "@huggingface/inference": "^2.8.1", "@icons-pack/react-simple-icons": "9.6.0", diff --git a/scripts/migrateClientDB/compile-migrations.ts b/scripts/migrateClientDB/compile-migrations.ts new file mode 100644 index 000000000000..c33e9dff5fb1 --- /dev/null +++ b/scripts/migrateClientDB/compile-migrations.ts @@ -0,0 +1,14 @@ +import { readMigrationFiles } from 'drizzle-orm/migrator'; +import { writeFileSync } from 'node:fs'; +import { join } from 'node:path'; + +const dbBase = join(__dirname, '../../src/database'); +const migrationsFolder = join(dbBase, './migrations'); +const migrations = readMigrationFiles({ migrationsFolder: migrationsFolder }); + +writeFileSync( + join(dbBase, './client/migrations.json'), + JSON.stringify(migrations, null, 2), // null, 2 adds indentation for better readability +); + +console.log('🏁 client migrations.json compiled!'); diff --git a/src/app/(main)/(mobile)/me/(home)/layout.tsx b/src/app/(main)/(mobile)/me/(home)/layout.tsx index 6e6c9e155314..bf0d52526ba7 100644 --- a/src/app/(main)/(mobile)/me/(home)/layout.tsx +++ b/src/app/(main)/(mobile)/me/(home)/layout.tsx @@ -1,6 +1,7 @@ import { PropsWithChildren } from 'react'; import MobileContentLayout from '@/components/server/MobileNavLayout'; +import InitClientDB from '@/features/InitClientDB'; import Header from './features/Header'; @@ -8,6 +9,7 @@ const Layout = ({ children }: PropsWithChildren) => { return ( <MobileContentLayout header={<Header />} withNav> {children} + <InitClientDB /> </MobileContentLayout> ); };
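For context on the new `db:generate-client` step above: it bakes the drizzle migration folders into `src/database/client/migrations.json` so the browser build can run migrations without any filesystem access. Each compiled entry looks roughly like the sketch below (field names are taken from the generated `migrations.json` further down in this diff; the `CompiledMigration` name and import path are only illustrative), and `db.ts` later fingerprints the whole bundle so a client only re-runs migrations when the compiled JSON actually changes.

```ts
import { Md5 } from 'ts-md5';

import migrations from '@/database/client/migrations.json';

// Illustrative shape of one compiled entry (inferred from the generated file).
interface CompiledMigration {
  sql: string[]; // the SQL statements of one migration folder
  bps: boolean;
  folderMillis: number; // timestamp encoded in the migration folder name
  hash: string; // content hash drizzle computed for that folder
}

const entries = migrations as unknown as CompiledMigration[];

// Same fingerprinting idea as DatabaseManager.migrate() below: hash the whole
// bundle and compare it against the value cached in localStorage.
const schemaHash = Md5.hashStr(JSON.stringify(entries));
console.log('client schema hash:', schemaHash);
```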
diff --git a/src/app/(main)/chat/_layout/Desktop/index.tsx b/src/app/(main)/chat/_layout/Desktop/index.tsx index 84296bd8f87e..4424f3809fcc 100644 --- a/src/app/(main)/chat/_layout/Desktop/index.tsx +++ b/src/app/(main)/chat/_layout/Desktop/index.tsx @@ -1,6 +1,7 @@ import { Flexbox } from 'react-layout-kit'; -import Migration from '../../features/Migration'; +import InitClientDB from '@/features/InitClientDB'; + import { LayoutProps } from '../type'; import SessionPanel from './SessionPanel'; @@ -18,7 +19,7 @@ const Layout = ({ children, session }: LayoutProps) => { {children} - <Migration /> + <InitClientDB /> {/* ↓ cloud slot ↓ */} {/* ↑ cloud slot ↑ */} diff --git a/src/app/(main)/chat/_layout/Mobile.tsx b/src/app/(main)/chat/_layout/Mobile.tsx index 81fea2799d4f..04fe2b4c30eb 100644 --- a/src/app/(main)/chat/_layout/Mobile.tsx +++ b/src/app/(main)/chat/_layout/Mobile.tsx @@ -1,10 +1,10 @@ 'use client'; import { createStyles } from 'antd-style'; -import { memo } from 'react'; +import { Suspense, memo } from 'react'; import { Flexbox } from 'react-layout-kit'; -import Migration from '@/app/(main)/chat/features/Migration'; +import InitClientDB from '@/features/InitClientDB'; import { useQuery } from '@/hooks/useQuery'; import { LayoutProps } from './type'; @@ -39,7 +39,9 @@ const Layout = memo(({ children, session }) => { > {children} - <Migration /> + <Suspense> + <InitClientDB /> + </Suspense> > ); }); diff --git a/src/app/(main)/chat/features/Migration/DBReader.ts b/src/app/(main)/chat/features/Migration/DBReader.ts new file mode 100644 index 000000000000..3a5667bc4090 --- /dev/null +++ b/src/app/(main)/chat/features/Migration/DBReader.ts @@ -0,0 +1,290 @@ +import { + ImportMessage, + ImportSession, + ImportSessionGroup, + ImportTopic, + ImporterEntryData, +} from '@/types/importer'; + +interface V2DB_File { + /** + * create Time + */ + createdAt: number; + /** + * file data array buffer + */ + data: ArrayBuffer; + /** + * file type + * @example 'image/png' + */ + fileType: string; + id: string; + metadata: any; + /** + * file name + * @example 'test.png' + */ + name: string; + /** + * the mode the database uses to save the file: + * 'local' means the raw file is saved into data, + * 'url' means the file is uploaded to a CDN and only the url is saved + */ + saveMode: 'local' | 'url'; + /** + * file size + */ + size: number; + /** + * file url if saveMode is url + */ + url: string; +} + +interface V2DB_MESSAGE { + content: string; + createdAt: number; + error?: any; + favorite: 0 | 1; + files?: string[]; + fromModel?: string; + fromProvider?: string; + id: string; + observationId?: string; + // foreign keys + parentId?: string; + plugin?: any; + pluginError?: any; + pluginState?: any; + + quotaId?: string; + role: string; + sessionId?: string; + tool_call_id?: string; + tools?: object[]; + topicId?: string; + + traceId?: string; + translate?: object | false; + tts?: any; + updatedAt: number; +} + +interface DB_Plugin { + createdAt: number; + id: string; + identifier: string; + manifest?: object; + settings?: object; + type: 'plugin' | 'customPlugin'; + updatedAt: number; +} + +interface DB_Session { + config: object; + createdAt: number; + group?: string; + // former Agent type + id: string; + meta: object; + pinned?: number; + type?: 'agent' | 'group'; + updatedAt: number; +} + +interface DB_SessionGroup { + createdAt: number; + id: string; + name: string; + sort?: number; + updatedAt: number; +} + +interface DB_Topic { + createdAt: number; + favorite?: number; + id: string; + sessionId?: string; + title: string; + updatedAt: number; +} + +interface DB_User { + avatar?: string; + createdAt: number; + id: string; + settings: object; + updatedAt: number; + uuid: string; +} + +interface MigrationData { + files: V2DB_File[]; + messages: V2DB_MESSAGE[]; + plugins: DB_Plugin[]; + sessionGroups: DB_SessionGroup[]; + sessions: DB_Session[]; + topics: DB_Topic[]; + users: DB_User[]; +} + +const
LOBE_CHAT_LOCAL_DB_NAME = 'LOBE_CHAT_DB'; + +const V2DB_LASET_SCHEMA_VERSION = 7; +export class V2DBReader { + private dbName: string = LOBE_CHAT_LOCAL_DB_NAME; + private storeNames: string[]; + + constructor(storeNames: string[]) { + this.storeNames = storeNames; + } + + /** + * Read all data + */ + async readAllData(): Promise<MigrationData> { + try { + // open the database connection + const db = await this.openDB(); + + // read all object stores in parallel + const results = await Promise.all( + this.storeNames.map((storeName) => this.readStore(db, storeName)), + ); + + // build the result object + const migrationData = this.storeNames.reduce((acc, storeName, index) => { + // @ts-expect-error + acc[storeName] = results[index]; + return acc; + }, {} as MigrationData); + + // close the database connection + db.close(); + + return migrationData; + } catch (error) { + console.error('Failed to read the database:', error); + throw error; + } + } + + async convertToImportData(data: MigrationData): Promise<ImporterEntryData> { + // convert messages + const messages = data.messages.map( + (msg): ImportMessage => ({ + // keep the original id + content: msg.content, + createdAt: msg.createdAt, + // handle error + error: msg.error || msg.pluginError, + + // handle extra info + extra: { + fromModel: msg.fromModel, + fromProvider: msg.fromProvider, + translate: msg.translate as any, + tts: msg.tts, + }, + + files: msg.files, + id: msg.id, + + // copy the original fields + observationId: msg.observationId, + parentId: msg.parentId, + plugin: msg.plugin, + pluginState: msg.pluginState, + quotaId: msg.quotaId, + role: msg.role as any, + sessionId: msg.sessionId, + tool_call_id: msg.tool_call_id, + tools: msg.tools as any, + + topicId: msg.topicId, + + traceId: msg.traceId, + + updatedAt: msg.updatedAt, + }), + ); + + // convert sessionGroups + const sessionGroups = data.sessionGroups.map( + (group): ImportSessionGroup => ({ + createdAt: group.createdAt, + id: group.id, + // keep the original id + name: group.name, + sort: group.sort || null, + updatedAt: group.updatedAt, + }), + ); + + // convert sessions + const sessions = data.sessions.map( + (session): ImportSession => ({ + // keep the original id + config: session.config as any, + createdAt: new Date(session.createdAt).toString(), + group: session.group, + id: session.id, + meta: session.meta as any, + pinned: session.pinned ? true : undefined, + type: session.type || 'agent', + updatedAt: new Date(session.updatedAt).toString(), + }), + ); + + const topics = data.topics.map( + (topic): ImportTopic => ({ + ...topic, + favorite: topic.favorite ?
true : undefined, + }), + ); + + return { + messages, + sessionGroups, + sessions, + topics, + version: V2DB_LASET_SCHEMA_VERSION, + }; + } + + /** + * Open the database + */ + private openDB(): Promise<IDBDatabase> { + return new Promise((resolve, reject) => { + const request = indexedDB.open(this.dbName); + + // eslint-disable-next-line unicorn/prefer-add-event-listener + request.onerror = () => { + reject(request.error); + }; + request.onsuccess = () => resolve(request.result); + }); + } + + /** + * Read all records from a single object store + */ + private readStore(db: IDBDatabase, storeName: string): Promise<any[]> { + return new Promise((resolve, reject) => { + try { + const transaction = db.transaction(storeName, 'readonly'); + const store = transaction.objectStore(storeName); + const request = store.getAll(); + + // eslint-disable-next-line unicorn/prefer-add-event-listener + request.onerror = () => reject(request.error); + request.onsuccess = () => resolve(request.result); + } catch (error) { + reject(error); + } + }); + } +} diff --git a/src/app/(main)/chat/features/Migration/UpgradeButton.tsx b/src/app/(main)/chat/features/Migration/UpgradeButton.tsx index c342e18a6151..67293e0ef9bf 100644 --- a/src/app/(main)/chat/features/Migration/UpgradeButton.tsx +++ b/src/app/(main)/chat/features/Migration/UpgradeButton.tsx @@ -1,14 +1,12 @@ import { Button } from 'antd'; -import { createStore, set } from 'idb-keyval'; import { ReactNode, memo } from 'react'; import { useTranslation } from 'react-i18next'; -import { Migration } from '@/migrations'; import { configService } from '@/services/config'; import { useChatStore } from '@/store/chat'; import { useSessionStore } from '@/store/session'; -import { MIGRATE_KEY, MigrationError, UpgradeStatus, V1DB_NAME, V1DB_TABLE_NAME } from './const'; +import { MigrationError, UpgradeStatus } from './const'; export interface UpgradeButtonProps { children?: ReactNode; @@ -31,21 +29,19 @@ const UpgradeButton = memo( const upgrade = async () => { try { - const data = Migration.migrate({ state, version: 1 }); - setUpgradeStatus(UpgradeStatus.UPGRADING); await configService.importConfigState({ exportType: 'sessions', - state: data.state, - version: 2, + state: state, + version: 7, }); await refreshSession(); await refreshMessages(); await refreshTopic(); - await set(MIGRATE_KEY, true, createStore(V1DB_NAME, V1DB_TABLE_NAME)); + localStorage.setItem('V2DB_IS_MIGRATED', '1'); setUpgradeStatus(UpgradeStatus.UPGRADED);
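Taken together with the `Migration/index.tsx` diff that follows, the new v2 upgrade path reads the old IndexedDB database directly and feeds it through the regular importer. A condensed sketch of the flow, using only APIs that appear in this diff (`V2DBReader`, `configService.importConfigState`, the `V2DB_IS_MIGRATED` flag); this is an illustration, not a drop-in replacement for either component:

```ts
import { configService } from '@/services/config';

import { V2DBReader } from './DBReader';

const migrateFromV2 = async () => {
  // store names match the list passed in Migration/index.tsx below
  const reader = new V2DBReader([
    'messages', 'files', 'plugins', 'sessionGroups', 'sessions', 'topics', 'users',
  ]);

  // dump the legacy LOBE_CHAT_DB IndexedDB and reshape it into importer data (schema version 7)
  const raw = await reader.readAllData();
  const state = await reader.convertToImportData(raw);

  // hand the converted state to the same import pipeline UpgradeButton.tsx uses
  await configService.importConfigState({ exportType: 'sessions', state, version: 7 });

  // remember that the migration ran so the modal is not shown again
  localStorage.setItem('V2DB_IS_MIGRATED', '1');
};

export default migrateFromV2;
```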
diff --git a/src/app/(main)/chat/features/Migration/index.tsx b/src/app/(main)/chat/features/Migration/index.tsx index 788b21472563..4313ce4c2df3 100644 --- a/src/app/(main)/chat/features/Migration/index.tsx +++ b/src/app/(main)/chat/features/Migration/index.tsx @@ -1,11 +1,14 @@ 'use client'; import { Spin } from 'antd'; -import { createStore, getMany } from 'idb-keyval'; import dynamic from 'next/dynamic'; import { memo, useEffect, useState } from 'react'; -import { MIGRATE_KEY, V1DB_NAME, V1DB_TABLE_NAME } from './const'; +import { isServerMode } from '@/const/version'; +import { useGlobalStore } from '@/store/global'; +import { systemStatusSelectors } from '@/store/global/selectors'; + +import { V2DBReader } from './DBReader'; const Modal = dynamic(() => import('./Modal'), { loading: () => <Spin />, ssr: false }); @@ -13,25 +16,33 @@ const Migration = memo(() => { const [dbState, setDbState] = useState(null); const [open, setOpen] = useState(false); - const checkMigration = async () => { - const [state, migrated] = await getMany( - ['state', MIGRATE_KEY], - createStore(V1DB_NAME, V1DB_TABLE_NAME), - ); + const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const checkMigration = async () => { + const isMigrated = localStorage.getItem('V2DB_IS_MIGRATED'); // if the db has already been migrated, don't show the modal - if (migrated) return; - - // if db doesn't exist state key,it means a new user - if (!state) return; - - setDbState(state); + if (isMigrated || isServerMode) return; + + const dbReader = new V2DBReader([ + 'messages', + 'files', + 'plugins', + 'sessionGroups', + 'sessions', + 'topics', + 'users', + ]); + const data = await dbReader.readAllData(); + console.log('migration data:', data); + const state = await dbReader.convertToImportData(data); + console.log('import state', state); + setDbState(state as any); setOpen(true); }; useEffect(() => { - checkMigration(); - }, []); + if (isPgliteInited) checkMigration(); + }, [isPgliteInited]); return open && <Modal state={dbState} />; }); diff --git a/src/app/(main)/settings/_layout/Desktop/index.tsx b/src/app/(main)/settings/_layout/Desktop/index.tsx index 9766a4bd00da..b640e99f2046 100644 --- a/src/app/(main)/settings/_layout/Desktop/index.tsx +++ b/src/app/(main)/settings/_layout/Desktop/index.tsx @@ -6,6 +6,7 @@ import { memo, useRef } from 'react'; import { useTranslation } from 'react-i18next'; import { Flexbox } from 'react-layout-kit'; +import InitClientDB from '@/features/InitClientDB'; import Footer from '@/features/Setting/Footer'; import SettingContainer from '@/features/Setting/SettingContainer'; import { useActiveSettingsKey } from '@/hooks/useActiveSettingsKey'; @@ -45,6 +46,7 @@ const Layout = memo(({ children, category }) => { )} }>{children} + <InitClientDB /> ); }); diff --git a/src/app/loading/Client/Content.tsx b/src/app/loading/Client/Content.tsx index 90c8859694a2..d035707f6fd8 100644 --- a/src/app/loading/Client/Content.tsx +++ b/src/app/loading/Client/Content.tsx @@ -1,17 +1,25 @@ +import dynamic from 'next/dynamic'; import React, { memo } from 'react'; import { useTranslation } from 'react-i18next'; import FullscreenLoading from '@/components/FullscreenLoading'; import { useGlobalStore } from '@/store/global'; import { systemStatusSelectors } from '@/store/global/selectors'; +import { DatabaseLoadingState } from '@/types/clientDB'; import { CLIENT_LOADING_STAGES } from '../stage'; +const InitError = dynamic(() => import('./Error'), { ssr: false }); + interface InitProps { setActiveStage: (value: string) => void; } -const Init = memo(() => { +const Init = memo(({ setActiveStage }) => { + const useInitClientDB = useGlobalStore((s) => s.useInitClientDB); + + useInitClientDB({ onStateChange: setActiveStage }); + return null; }); @@ -23,12 +31,14 @@ interface ContentProps { const Content = memo(({ loadingStage, setActiveStage }) => { const { t } = useTranslation('common'); const isPgliteNotInited = useGlobalStore(systemStatusSelectors.isPgliteNotInited); + const isError = useGlobalStore((s) => s.initClientDBStage === DatabaseLoadingState.Error); return ( <> {isPgliteNotInited && <Init setActiveStage={setActiveStage} />} } stages={CLIENT_LOADING_STAGES.map((key) => t(`appLoading.${key}` as any))} /> >
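The `useInitClientDB` action that `Init` pulls from the global store is not part of this diff (it lives in `src/store/global`), but given the `initializeDB` API added in `src/database/client/db.ts` below, it plausibly amounts to something like the sketch here; the wiring shown is an assumption, only the imported names are taken from this diff:

```ts
import { initializeDB } from '@/database/client/db';
import { DatabaseLoadingState } from '@/types/clientDB';

// Hypothetical store action: forward DB loading states to the loading screen.
// Because CLIENT_LOADING_STAGES (see stage.ts below) now contains the
// DatabaseLoadingState values, the stage string can drive the progress list directly.
export const useInitClientDB = async (params: {
  onStateChange: (stage: string) => void;
}) => {
  await initializeDB({
    onStateChange: (state: DatabaseLoadingState) => params.onStateChange(state),
  });
};
```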
diff --git a/src/app/loading/Client/Error.tsx b/src/app/loading/Client/Error.tsx new file mode 100644 index 000000000000..5958ee631892 --- /dev/null +++ b/src/app/loading/Client/Error.tsx @@ -0,0 +1,27 @@ +import { Button } from 'antd'; +import React from 'react'; +import { useTranslation } from 'react-i18next'; +import { Center } from 'react-layout-kit'; + +import ErrorResult from '@/features/InitClientDB/ErrorResult'; + +const InitError = () => { + const { t } = useTranslation('common'); + + return ( + <ErrorResult> + {({ setOpen }) => ( + <Center> + {t('appLoading.failed')} + <Button onClick={() => setOpen(true)} type={'primary'}> + {t('appLoading.showDetail')} + </Button> + </Center> + )} + </ErrorResult> + ); +}; + +export default InitError; diff --git a/src/app/loading/stage.ts b/src/app/loading/stage.ts index 0897dff3afd6..2375bb8d9a65 100644 --- a/src/app/loading/stage.ts +++ b/src/app/loading/stage.ts @@ -1,3 +1,5 @@ +import { DatabaseLoadingState } from '@/types/clientDB'; + export enum AppLoadingStage { GoToChat = 'goToChat', Idle = 'appIdle', @@ -17,6 +19,12 @@ export const SERVER_LOADING_STAGES = [ export const CLIENT_LOADING_STAGES = [ AppLoadingStage.Idle, AppLoadingStage.Initializing, + DatabaseLoadingState.Initializing, + DatabaseLoadingState.LoadingDependencies, + DatabaseLoadingState.LoadingWasm, + DatabaseLoadingState.Migrating, + DatabaseLoadingState.Finished, + DatabaseLoadingState.Ready, AppLoadingStage.InitUser, AppLoadingStage.GoToChat, ] as string[]; diff --git a/src/components/FullscreenLoading/index.tsx b/src/components/FullscreenLoading/index.tsx index be0dab226e44..52a3b396cf16 100644 --- a/src/components/FullscreenLoading/index.tsx +++ b/src/components/FullscreenLoading/index.tsx @@ -1,4 +1,4 @@ -import React, { memo } from 'react'; +import React, { ReactNode, memo } from 'react'; import { Center, Flexbox } from 'react-layout-kit'; import { ProductLogo } from '@/components/Branding'; @@ -6,15 +6,16 @@ import InitProgress, { StageItem } from '@/components/InitProgress'; interface FullscreenLoadingProps { activeStage: number; + contentRender?: ReactNode; stages: StageItem[]; } -const FullscreenLoading = memo(({ activeStage, stages }) => { +const FullscreenLoading = memo(({ activeStage, stages, contentRender }) => { return ( - <InitProgress activeStage={activeStage} stages={stages} /> + {contentRender ? contentRender : <InitProgress activeStage={activeStage} stages={stages} />} ); }); diff --git a/src/const/version.ts b/src/const/version.ts index 63ee1d72362a..0f3819a47490 100644 --- a/src/const/version.ts +++ b/src/const/version.ts @@ -6,6 +6,7 @@ import { BRANDING_NAME, ORG_NAME } from './branding'; export const CURRENT_VERSION = pkg.version; export const isServerMode = getServerDBConfig().NEXT_PUBLIC_ENABLED_SERVER_SERVICE; +export const isUsePgliteDB = process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite'; // @ts-ignore export const isCustomBranding = BRANDING_NAME !== 'LobeChat'; diff --git a/src/database/client/db.test.ts b/src/database/client/db.test.ts new file mode 100644 index 000000000000..25c91bb15077 --- /dev/null +++ b/src/database/client/db.test.ts @@ -0,0 +1,172 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +import { ClientDBLoadingProgress, DatabaseLoadingState } from '@/types/clientDB'; + +import { DatabaseManager } from './db'; + +// Mock all external dependencies +vi.mock('@electric-sql/pglite', () => ({ + default: vi.fn(), + IdbFs: vi.fn(), + PGlite: vi.fn(), + MemoryFS: vi.fn(), +})); + +vi.mock('@electric-sql/pglite/vector', () => ({ + default: vi.fn(), + vector: vi.fn(), +})); + +vi.mock('drizzle-orm/pglite', () => ({ + drizzle: vi.fn(() => ({ + dialect: { + migrate: vi.fn().mockResolvedValue(undefined), + }, + })), +})); + +let manager: DatabaseManager; +let progressEvents: ClientDBLoadingProgress[] = []; +let stateChanges: DatabaseLoadingState[] = []; + +let callbacks = { + onProgress: vi.fn((progress: ClientDBLoadingProgress) => { + progressEvents.push(progress); + }), + onStateChange: vi.fn((state: DatabaseLoadingState) => { + stateChanges.push(state); + }), +}; + +beforeEach(() => { + vi.clearAllMocks(); + progressEvents = []; + stateChanges = []; + + callbacks = { + onProgress:
vi.fn((progress: ClientDBLoadingProgress) => { + progressEvents.push(progress); + }), + onStateChange: vi.fn((state: DatabaseLoadingState) => { + stateChanges.push(state); + }), + }; + // @ts-expect-error + DatabaseManager['instance'] = undefined; + manager = DatabaseManager.getInstance(); +}); + +describe('DatabaseManager', () => { + describe('Callback Handling', () => { + it('should properly track loading states', async () => { + await manager.initialize(callbacks); + + // verify the state transition order + expect(stateChanges).toEqual([ + DatabaseLoadingState.Initializing, + DatabaseLoadingState.LoadingDependencies, + DatabaseLoadingState.LoadingWasm, + DatabaseLoadingState.Migrating, + DatabaseLoadingState.Finished, + DatabaseLoadingState.Ready, + ]); + }); + + it('should report dependencies loading progress', async () => { + await manager.initialize(callbacks); + + // verify the dependency loading progress callbacks + const dependencyProgress = progressEvents.filter((e) => e.phase === 'dependencies'); + expect(dependencyProgress.length).toBeGreaterThan(0); + expect(dependencyProgress[dependencyProgress.length - 1]).toEqual( + expect.objectContaining({ + phase: 'dependencies', + progress: 100, + costTime: expect.any(Number), + }), + ); + }); + + it('should report WASM loading progress', async () => { + await manager.initialize(callbacks); + + // verify the WASM loading progress callbacks + const wasmProgress = progressEvents.filter((e) => e.phase === 'wasm'); + // expect(wasmProgress.length).toBeGreaterThan(0); + expect(wasmProgress[wasmProgress.length - 1]).toEqual( + expect.objectContaining({ + phase: 'wasm', + progress: 100, + costTime: expect.any(Number), + }), + ); + }); + + it('should handle initialization errors', async () => { + // simulate a failed download + vi.spyOn(global, 'fetch').mockRejectedValueOnce(new Error('Network error')); + + await expect(manager.initialize(callbacks)).rejects.toThrow(); + expect(stateChanges).toContain(DatabaseLoadingState.Error); + }); + + it('should only initialize once when called multiple times', async () => { + const firstInit = manager.initialize(callbacks); + const secondInit = manager.initialize(callbacks); + + await Promise.all([firstInit, secondInit]); + + // verify the callback is only fired once + const readyStateCount = stateChanges.filter( + (state) => state === DatabaseLoadingState.Ready, + ).length; + expect(readyStateCount).toBe(1); + }); + }); + + describe('Progress Calculation', () => { + it('should report progress between 0 and 100', async () => { + await manager.initialize(callbacks); + + // verify all progress values are within the valid range + progressEvents.forEach((event) => { + expect(event.progress).toBeGreaterThanOrEqual(0); + expect(event.progress).toBeLessThanOrEqual(100); + }); + }); + + it('should include timing information', async () => { + await manager.initialize(callbacks); + + // verify the final progress callback includes the time cost + const finalProgress = progressEvents[progressEvents.length - 1]; + expect(finalProgress.costTime).toBeGreaterThan(0); + }); + }); + + describe('Error Handling', () => { + it('should handle missing callbacks gracefully', async () => { + // the case where no callbacks are provided + await expect(manager.initialize()).resolves.toBeDefined(); + }); + + it('should handle partial callbacks', async () => { + // only some of the callbacks are provided + await expect(manager.initialize({ onProgress: callbacks.onProgress })).resolves.toBeDefined(); + await expect( + manager.initialize({ onStateChange: callbacks.onStateChange }), + ).resolves.toBeDefined(); + }); + }); + + describe('Database Access', () => { + it('should throw error when accessing database before initialization', () => { + expect(() => manager.db).toThrow('Database not initialized'); + }); + + it('should provide access to database after initialization', async () => { + await manager.initialize(); + expect(() => manager.db).not.toThrow(); + }); + }); +});
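The tests above pin down the public contract of the client database: a state sequence of Initializing → LoadingDependencies → LoadingWasm → Migrating → Finished → Ready, progress callbacks bounded to 0–100 with a final `costTime`, and idempotent initialization. A minimal consumer of that contract, using the `initializeDB`/`clientDB` exports defined in `db.ts` right below (the `users` query at the end assumes drizzle's relational query API over the bundled schema):

```ts
import { clientDB, initializeDB } from '@/database/client/db';
import { DatabaseLoadingState } from '@/types/clientDB';

const bootClientDB = async () => {
  // Repeated calls reuse the same init promise, so this is safe to run on every page.
  await initializeDB({
    onProgress: ({ phase, progress }) => console.log(`[clientDB] ${phase}: ${progress}%`),
    onStateChange: (state) => {
      if (state === DatabaseLoadingState.Ready) console.log('[clientDB] pglite is ready');
    },
    onError: (error) => console.error('[clientDB] initialization failed:', error),
  });

  // After initialization the proxied drizzle instance behaves like a regular database handle.
  return clientDB.query.users.findMany();
};

export default bootClientDB;
```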
diff --git a/src/database/client/db.ts b/src/database/client/db.ts new file mode 100644 index 000000000000..060bda3999ab --- /dev/null +++ b/src/database/client/db.ts @@ -0,0 +1,246 @@ +import type { PgliteDatabase } from 'drizzle-orm/pglite'; +import { Md5 } from 'ts-md5'; + +import { ClientDBLoadingProgress, DatabaseLoadingState } from '@/types/clientDB'; +import { sleep } from '@/utils/sleep'; + +import * as schema from '../schemas'; +import migrations from './migrations.json'; + +const pgliteSchemaHashCache = 'LOBE_CHAT_PGLITE_SCHEMA_HASH'; + +type DrizzleInstance = PgliteDatabase<typeof schema>; + +export interface DatabaseLoadingCallbacks { + onError?: (error: Error) => void; + onProgress?: (progress: ClientDBLoadingProgress) => void; + onStateChange?: (state: DatabaseLoadingState) => void; +} + +export class DatabaseManager { + private static instance: DatabaseManager; + private dbInstance: DrizzleInstance | null = null; + private initPromise: Promise<DrizzleInstance> | null = null; + private callbacks?: DatabaseLoadingCallbacks; + private isLocalDBSchemaSynced = false; + + // CDN configuration + private static WASM_CDN_URL = + 'https://registry.npmmirror.com/@electric-sql/pglite/0.2.13/files/dist/postgres.wasm'; + + private constructor() {} + + static getInstance() { + if (!DatabaseManager.instance) { + DatabaseManager.instance = new DatabaseManager(); + } + return DatabaseManager.instance; + } + + // Load and compile the WASM module + private async loadWasmModule(): Promise<WebAssembly.Module> { + const start = Date.now(); + this.callbacks?.onStateChange?.(DatabaseLoadingState.LoadingWasm); + + const response = await fetch(DatabaseManager.WASM_CDN_URL); + + const contentLength = Number(response.headers.get('Content-Length')) || 0; + const reader = response.body?.getReader(); + + if (!reader) throw new Error('Failed to start WASM download'); + + let receivedLength = 0; + const chunks: Uint8Array[] = []; + + // read the response stream + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + + if (done) break; + + chunks.push(value); + receivedLength += value.length; + + // calculate and report the progress + const progress = Math.min(Math.round((receivedLength / contentLength) * 100), 100); + this.callbacks?.onProgress?.({ + phase: 'wasm', + progress, + }); + } + + // merge the chunks + const wasmBytes = new Uint8Array(receivedLength); + let position = 0; + for (const chunk of chunks) { + wasmBytes.set(chunk, position); + position += chunk.length; + } + + this.callbacks?.onProgress?.({ + costTime: Date.now() - start, + phase: 'wasm', + progress: 100, + }); + + // compile the WASM module + return WebAssembly.compile(wasmBytes); + } + + // Asynchronously load the PGlite-related dependencies + private async loadDependencies() { + const start = Date.now(); + this.callbacks?.onStateChange?.(DatabaseLoadingState.LoadingDependencies); + + const imports = [ + import('@electric-sql/pglite').then((m) => ({ + IdbFs: m.IdbFs, + MemoryFS: m.MemoryFS, + PGlite: m.PGlite, + })), + import('@electric-sql/pglite/vector'), + import('drizzle-orm/pglite'), + ]; + + let loaded = 0; + const results = await Promise.all( + imports.map(async (importPromise) => { + const result = await importPromise; + loaded += 1; + + // calculate loading progress + this.callbacks?.onProgress?.({ + phase: 'dependencies', + progress: Math.min(Math.round((loaded / imports.length) * 100), 100), + }); + return result; + }), + ); + + this.callbacks?.onProgress?.({ + costTime: Date.now() - start, + phase:
'dependencies', + progress: 100, + }); + + // @ts-ignore + const [{ PGlite, IdbFs, MemoryFS }, { vector }, { drizzle }] = results; + + return { IdbFs, MemoryFS, PGlite, drizzle, vector }; + } + + // Run the database migrations + private async migrate(skipMultiRun = false): Promise<DrizzleInstance> { + if (this.isLocalDBSchemaSynced && skipMultiRun) return this.db; + + const cacheHash = localStorage.getItem(pgliteSchemaHashCache); + const hash = Md5.hashStr(JSON.stringify(migrations)); + + // if hash is the same, no need to migrate + if (hash === cacheHash) { + this.isLocalDBSchemaSynced = true; + return this.db; + } + + const start = Date.now(); + try { + this.callbacks?.onStateChange?.(DatabaseLoadingState.Migrating); + + // refs: https://github.com/drizzle-team/drizzle-orm/discussions/2532 + // @ts-expect-error + await this.db.dialect.migrate(migrations, this.db.session, {}); + localStorage.setItem(pgliteSchemaHashCache, hash); + this.isLocalDBSchemaSynced = true; + + console.info(`🗂 Migration success, take ${Date.now() - start}ms`); + } catch (cause) { + console.error('❌ Local database schema migration failed', cause); + throw cause; + } + + return this.db; + } + + // Initialize the database + async initialize(callbacks?: DatabaseLoadingCallbacks): Promise<DrizzleInstance> { + if (this.initPromise) return this.initPromise; + + this.callbacks = callbacks; + + this.initPromise = (async () => { + try { + if (this.dbInstance) return this.dbInstance; + + const time = Date.now(); + // initialize the database + this.callbacks?.onStateChange?.(DatabaseLoadingState.Initializing); + + // load dependencies + const { PGlite, vector, drizzle, IdbFs, MemoryFS } = await this.loadDependencies(); + + // load and compile the WASM module + const wasmModule = await this.loadWasmModule(); + + const db = new PGlite({ + extensions: { vector }, + fs: typeof window === 'undefined' ? new MemoryFS('lobechat') : new IdbFs('lobechat'), + relaxedDurability: true, + wasmModule, + }); + + this.dbInstance = drizzle({ client: db, schema }); + + await this.migrate(true); + + this.callbacks?.onStateChange?.(DatabaseLoadingState.Finished); + console.log(`✅ Database initialized in ${Date.now() - time}ms`); + + await sleep(50); + + this.callbacks?.onStateChange?.(DatabaseLoadingState.Ready); + + return this.dbInstance as DrizzleInstance; + } catch (e) { + this.initPromise = null; + this.callbacks?.onStateChange?.(DatabaseLoadingState.Error); + const error = e as Error; + this.callbacks?.onError?.({ + message: error.message, + name: error.name, + stack: error.stack, + }); + throw error; + } + })(); + + return this.initPromise; + } + + // Access the database instance + get db(): DrizzleInstance { + if (!this.dbInstance) { + throw new Error('Database not initialized.
Please call initialize() first.'); + } + return this.dbInstance; + } + + // 创建代理对象 + createProxy(): DrizzleInstance { + return new Proxy({} as DrizzleInstance, { + get: (target, prop) => { + return this.db[prop as keyof DrizzleInstance]; + }, + }); + } +} + +// 导出单例 +const dbManager = DatabaseManager.getInstance(); + +// 保持原有的 clientDB 导出不变 +export const clientDB = dbManager.createProxy(); + +// 导出初始化方法,供应用启动时使用 +export const initializeDB = (callbacks?: DatabaseLoadingCallbacks) => + dbManager.initialize(callbacks); diff --git a/src/database/client/migrations.json b/src/database/client/migrations.json new file mode 100644 index 000000000000..f6600bba1d51 --- /dev/null +++ b/src/database/client/migrations.json @@ -0,0 +1,289 @@ +[ + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"agents\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"slug\" varchar(100),\n\t\"title\" text,\n\t\"description\" text,\n\t\"tags\" jsonb DEFAULT '[]'::jsonb,\n\t\"avatar\" text,\n\t\"background_color\" text,\n\t\"plugins\" jsonb DEFAULT '[]'::jsonb,\n\t\"user_id\" text NOT NULL,\n\t\"chat_config\" jsonb,\n\t\"few_shots\" jsonb,\n\t\"model\" text,\n\t\"params\" jsonb DEFAULT '{}'::jsonb,\n\t\"provider\" text,\n\t\"system_role\" text,\n\t\"tts\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_slug_unique\" UNIQUE(\"slug\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"agents_tags\" (\n\t\"agent_id\" text NOT NULL,\n\t\"tag_id\" integer NOT NULL,\n\tCONSTRAINT \"agents_tags_agent_id_tag_id_pk\" PRIMARY KEY(\"agent_id\",\"tag_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"agents_to_sessions\" (\n\t\"agent_id\" text NOT NULL,\n\t\"session_id\" text NOT NULL,\n\tCONSTRAINT \"agents_to_sessions_agent_id_session_id_pk\" PRIMARY KEY(\"agent_id\",\"session_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"files\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"file_type\" varchar(255) NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"size\" integer NOT NULL,\n\t\"url\" text NOT NULL,\n\t\"metadata\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"files_to_agents\" (\n\t\"file_id\" text NOT NULL,\n\t\"agent_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_agents_file_id_agent_id_pk\" PRIMARY KEY(\"file_id\",\"agent_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"files_to_messages\" (\n\t\"file_id\" text NOT NULL,\n\t\"message_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_messages_file_id_message_id_pk\" PRIMARY KEY(\"file_id\",\"message_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"files_to_sessions\" (\n\t\"file_id\" text NOT NULL,\n\t\"session_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_sessions_file_id_session_id_pk\" PRIMARY KEY(\"file_id\",\"session_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"user_installed_plugins\" (\n\t\"user_id\" text NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"type\" text NOT NULL,\n\t\"manifest\" jsonb,\n\t\"settings\" jsonb,\n\t\"custom_params\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"user_installed_plugins_user_id_identifier_pk\" PRIMARY KEY(\"user_id\",\"identifier\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"market\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"agent_id\" text,\n\t\"plugin_id\" integer,\n\t\"type\" 
text NOT NULL,\n\t\"view\" integer DEFAULT 0,\n\t\"like\" integer DEFAULT 0,\n\t\"used\" integer DEFAULT 0,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_plugins\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"tool_call_id\" text,\n\t\"type\" text DEFAULT 'default',\n\t\"api_name\" text,\n\t\"arguments\" text,\n\t\"identifier\" text,\n\t\"state\" jsonb,\n\t\"error\" jsonb\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_tts\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"content_md5\" text,\n\t\"file_id\" text,\n\t\"voice\" text\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_translates\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"content\" text,\n\t\"from\" text,\n\t\"to\" text\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"messages\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"role\" text NOT NULL,\n\t\"content\" text,\n\t\"model\" text,\n\t\"provider\" text,\n\t\"favorite\" boolean DEFAULT false,\n\t\"error\" jsonb,\n\t\"tools\" jsonb,\n\t\"trace_id\" text,\n\t\"observation_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"session_id\" text,\n\t\"topic_id\" text,\n\t\"parent_id\" text,\n\t\"quota_id\" text,\n\t\"agent_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"plugins\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"title\" text NOT NULL,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"author\" text,\n\t\"manifest\" text NOT NULL,\n\t\"locale\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"plugins_identifier_unique\" UNIQUE(\"identifier\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"plugins_tags\" (\n\t\"plugin_id\" integer NOT NULL,\n\t\"tag_id\" integer NOT NULL,\n\tCONSTRAINT \"plugins_tags_plugin_id_tag_id_pk\" PRIMARY KEY(\"plugin_id\",\"tag_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"session_groups\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"sort\" integer,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"sessions\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"slug\" varchar(100) NOT NULL,\n\t\"title\" text,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"background_color\" text,\n\t\"type\" text DEFAULT 'agent',\n\t\"user_id\" text NOT NULL,\n\t\"group_id\" text,\n\t\"pinned\" boolean DEFAULT false,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"tags\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"slug\" text NOT NULL,\n\t\"name\" text,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"tags_slug_unique\" UNIQUE(\"slug\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"topics\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"session_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"favorite\" boolean DEFAULT false,\n\t\"title\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT 
NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"user_settings\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"tts\" jsonb,\n\t\"key_vaults\" text,\n\t\"general\" jsonb,\n\t\"language_model\" jsonb,\n\t\"system_agent\" jsonb,\n\t\"default_agent\" jsonb,\n\t\"tool\" jsonb\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"users\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"username\" text,\n\t\"email\" text,\n\t\"avatar\" text,\n\t\"phone\" text,\n\t\"first_name\" text,\n\t\"last_name\" text,\n\t\"is_onboarded\" boolean DEFAULT false,\n\t\"clerk_created_at\" timestamp with time zone,\n\t\"preference\" jsonb DEFAULT '{\"guide\":{\"moveSettingsToAvatar\":true,\"topic\":true},\"telemetry\":null,\"useCmdEnterToSend\":false}'::jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"key\" text,\n\tCONSTRAINT \"users_username_unique\" UNIQUE(\"username\")\n);\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents\" ADD CONSTRAINT \"agents_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_tags\" ADD CONSTRAINT \"agents_tags_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_tags\" ADD CONSTRAINT \"agents_tags_tag_id_tags_id_fk\" FOREIGN KEY (\"tag_id\") REFERENCES \"public\".\"tags\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_to_sessions\" ADD CONSTRAINT \"agents_to_sessions_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_to_sessions\" ADD CONSTRAINT \"agents_to_sessions_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_agents\" ADD CONSTRAINT \"files_to_agents_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_agents\" ADD CONSTRAINT \"files_to_agents_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_messages\" ADD CONSTRAINT \"files_to_messages_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_messages\" ADD CONSTRAINT \"files_to_messages_message_id_messages_id_fk\" FOREIGN KEY 
(\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_sessions\" ADD CONSTRAINT \"files_to_sessions_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files_to_sessions\" ADD CONSTRAINT \"files_to_sessions_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"user_installed_plugins\" ADD CONSTRAINT \"user_installed_plugins_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_plugin_id_plugins_id_fk\" FOREIGN KEY (\"plugin_id\") REFERENCES \"public\".\"plugins\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_plugins\" ADD CONSTRAINT \"message_plugins_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_tts\" ADD CONSTRAINT \"message_tts_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_tts\" ADD CONSTRAINT \"message_tts_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_translates\" ADD CONSTRAINT \"message_translates_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_topic_id_topics_id_fk\" FOREIGN KEY (\"topic_id\") REFERENCES \"public\".\"topics\"(\"id\") ON DELETE cascade ON UPDATE no 
action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_parent_id_messages_id_fk\" FOREIGN KEY (\"parent_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_quota_id_messages_id_fk\" FOREIGN KEY (\"quota_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"plugins_tags\" ADD CONSTRAINT \"plugins_tags_plugin_id_plugins_id_fk\" FOREIGN KEY (\"plugin_id\") REFERENCES \"public\".\"plugins\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"plugins_tags\" ADD CONSTRAINT \"plugins_tags_tag_id_tags_id_fk\" FOREIGN KEY (\"tag_id\") REFERENCES \"public\".\"tags\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_groups_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_group_id_session_groups_id_fk\" FOREIGN KEY (\"group_id\") REFERENCES \"public\".\"session_groups\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"tags\" ADD CONSTRAINT \"tags_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"topics\" ADD CONSTRAINT \"topics_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"topics\" ADD CONSTRAINT \"topics_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"user_settings\" ADD CONSTRAINT \"user_settings_id_users_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nCREATE INDEX IF NOT EXISTS \"messages_created_at_idx\" ON \"messages\" (\"created_at\");", + "\nCREATE UNIQUE INDEX IF NOT EXISTS \"slug_user_id_unique\" ON \"sessions\" (\"slug\",\"user_id\");\n" + ], + "bps": true, + "folderMillis": 1716982944425, + "hash": "1513c1da50dc083fc0bd9783fe88c60e4fa80b60db645aa87bfda54332252c65" + }, + { 
+ "sql": [ + "ALTER TABLE \"messages\" ADD COLUMN \"client_id\" text;", + "\nALTER TABLE \"session_groups\" ADD COLUMN \"client_id\" text;", + "\nALTER TABLE \"sessions\" ADD COLUMN \"client_id\" text;", + "\nALTER TABLE \"topics\" ADD COLUMN \"client_id\" text;", + "\nCREATE INDEX IF NOT EXISTS \"messages_client_id_idx\" ON \"messages\" (\"client_id\");", + "\nALTER TABLE \"messages\" ADD CONSTRAINT \"messages_client_id_unique\" UNIQUE(\"client_id\");", + "\nALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_groups_client_id_unique\" UNIQUE(\"client_id\");", + "\nALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_client_id_unique\" UNIQUE(\"client_id\");", + "\nALTER TABLE \"topics\" ADD CONSTRAINT \"topics_client_id_unique\" UNIQUE(\"client_id\");\n" + ], + "bps": true, + "folderMillis": 1717153686544, + "hash": "ddb29ee7e7a675c12b44996e4be061b1736e8f785052242801f4cdfb2a94f258" + }, + { + "sql": [ + "ALTER TABLE \"messages\" DROP CONSTRAINT \"messages_client_id_unique\";", + "\nALTER TABLE \"session_groups\" DROP CONSTRAINT \"session_groups_client_id_unique\";", + "\nALTER TABLE \"sessions\" DROP CONSTRAINT \"sessions_client_id_unique\";", + "\nALTER TABLE \"topics\" DROP CONSTRAINT \"topics_client_id_unique\";", + "\nDROP INDEX IF EXISTS \"messages_client_id_idx\";", + "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_client_id_user_unique\" ON \"messages\" (\"client_id\",\"user_id\");", + "\nALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_group_client_id_user_unique\" UNIQUE(\"client_id\",\"user_id\");", + "\nALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_client_id_user_id_unique\" UNIQUE(\"client_id\",\"user_id\");", + "\nALTER TABLE \"topics\" ADD CONSTRAINT \"topic_client_id_user_id_unique\" UNIQUE(\"client_id\",\"user_id\");" + ], + "bps": true, + "folderMillis": 1717587734458, + "hash": "90b61fc3e744d8e2609418d9e25274ff07af4caf87370bb614db511d67900d73" + }, + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"user_budgets\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"free_budget_id\" text,\n\t\"free_budget_key\" text,\n\t\"subscription_budget_id\" text,\n\t\"subscription_budget_key\" text,\n\t\"package_budget_id\" text,\n\t\"package_budget_key\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"user_subscriptions\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"stripe_id\" text,\n\t\"currency\" text,\n\t\"pricing\" integer,\n\t\"billing_paid_at\" integer,\n\t\"billing_cycle_start\" integer,\n\t\"billing_cycle_end\" integer,\n\t\"cancel_at_period_end\" boolean,\n\t\"cancel_at\" integer,\n\t\"next_billing\" jsonb,\n\t\"plan\" text,\n\t\"recurring\" text,\n\t\"storage_limit\" integer,\n\t\"status\" integer,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nALTER TABLE \"users\" ALTER COLUMN \"preference\" DROP DEFAULT;", + "\nDO $$ BEGIN\n ALTER TABLE \"user_budgets\" ADD CONSTRAINT \"user_budgets_id_users_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"user_subscriptions\" ADD CONSTRAINT \"user_subscriptions_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN 
null;\nEND $$;\n", + "\nALTER TABLE \"users\" DROP COLUMN IF EXISTS \"key\";\n" + ], + "bps": true, + "folderMillis": 1718460779230, + "hash": "535a9aba48be3d75762f29bbb195736f17abfe51f41a548debe925949dd0caf2" + }, + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"nextauth_accounts\" (\n\t\"access_token\" text,\n\t\"expires_at\" integer,\n\t\"id_token\" text,\n\t\"provider\" text NOT NULL,\n\t\"providerAccountId\" text NOT NULL,\n\t\"refresh_token\" text,\n\t\"scope\" text,\n\t\"session_state\" text,\n\t\"token_type\" text,\n\t\"type\" text NOT NULL,\n\t\"userId\" text NOT NULL,\n\tCONSTRAINT \"nextauth_accounts_provider_providerAccountId_pk\" PRIMARY KEY(\"provider\",\"providerAccountId\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"nextauth_authenticators\" (\n\t\"counter\" integer NOT NULL,\n\t\"credentialBackedUp\" boolean NOT NULL,\n\t\"credentialDeviceType\" text NOT NULL,\n\t\"credentialID\" text NOT NULL,\n\t\"credentialPublicKey\" text NOT NULL,\n\t\"providerAccountId\" text NOT NULL,\n\t\"transports\" text,\n\t\"userId\" text NOT NULL,\n\tCONSTRAINT \"nextauth_authenticators_userId_credentialID_pk\" PRIMARY KEY(\"userId\",\"credentialID\"),\n\tCONSTRAINT \"nextauth_authenticators_credentialID_unique\" UNIQUE(\"credentialID\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"nextauth_sessions\" (\n\t\"expires\" timestamp NOT NULL,\n\t\"sessionToken\" text PRIMARY KEY NOT NULL,\n\t\"userId\" text NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"nextauth_verificationtokens\" (\n\t\"expires\" timestamp NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"token\" text NOT NULL,\n\tCONSTRAINT \"nextauth_verificationtokens_identifier_token_pk\" PRIMARY KEY(\"identifier\",\"token\")\n);\n", + "\nALTER TABLE \"users\" ADD COLUMN \"full_name\" text;", + "\nALTER TABLE \"users\" ADD COLUMN \"email_verified_at\" timestamp with time zone;", + "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_accounts\" ADD CONSTRAINT \"nextauth_accounts_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_authenticators\" ADD CONSTRAINT \"nextauth_authenticators_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_sessions\" ADD CONSTRAINT \"nextauth_sessions_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n" + ], + "bps": true, + "folderMillis": 1721724512422, + "hash": "c63c5819d73414632ea32c543cfb997be31a2be3fad635c148c97e726c57fd16" + }, + { + "sql": [ + "-- Custom SQL migration file, put you code below! 
--\nCREATE EXTENSION IF NOT EXISTS vector;\n" + ], + "bps": true, + "folderMillis": 1722944166657, + "hash": "c112a4eb471fa4efe791b250057a1e33040515a0c60361c7d7a59044ec9e1667" + }, + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"agents_files\" (\n\t\"file_id\" text NOT NULL,\n\t\"agent_id\" text NOT NULL,\n\t\"enabled\" boolean DEFAULT true,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_files_file_id_agent_id_user_id_pk\" PRIMARY KEY(\"file_id\",\"agent_id\",\"user_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"agents_knowledge_bases\" (\n\t\"agent_id\" text NOT NULL,\n\t\"knowledge_base_id\" text NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"enabled\" boolean DEFAULT true,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_knowledge_bases_agent_id_knowledge_base_id_pk\" PRIMARY KEY(\"agent_id\",\"knowledge_base_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"async_tasks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"type\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"user_id\" text NOT NULL,\n\t\"duration\" integer,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"file_chunks\" (\n\t\"file_id\" varchar,\n\t\"chunk_id\" uuid,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"file_chunks_file_id_chunk_id_pk\" PRIMARY KEY(\"file_id\",\"chunk_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"global_files\" (\n\t\"hash_id\" varchar(64) PRIMARY KEY NOT NULL,\n\t\"file_type\" varchar(255) NOT NULL,\n\t\"size\" integer NOT NULL,\n\t\"url\" text NOT NULL,\n\t\"metadata\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"knowledge_base_files\" (\n\t\"knowledge_base_id\" text NOT NULL,\n\t\"file_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"knowledge_base_files_knowledge_base_id_file_id_pk\" PRIMARY KEY(\"knowledge_base_id\",\"file_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"knowledge_bases\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"type\" text,\n\t\"user_id\" text NOT NULL,\n\t\"is_public\" boolean DEFAULT false,\n\t\"settings\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_chunks\" (\n\t\"message_id\" text,\n\t\"chunk_id\" uuid,\n\tCONSTRAINT \"message_chunks_chunk_id_message_id_pk\" PRIMARY KEY(\"chunk_id\",\"message_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_queries\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"message_id\" text NOT NULL,\n\t\"rewrite_query\" text,\n\t\"user_query\" text,\n\t\"embeddings_id\" uuid\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"message_query_chunks\" (\n\t\"id\" text,\n\t\"query_id\" uuid,\n\t\"chunk_id\" uuid,\n\t\"similarity\" numeric(6, 5),\n\tCONSTRAINT \"message_query_chunks_chunk_id_id_query_id_pk\" PRIMARY KEY(\"chunk_id\",\"id\",\"query_id\")\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"chunks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT 
NULL,\n\t\"text\" text,\n\t\"abstract\" text,\n\t\"metadata\" jsonb,\n\t\"index\" integer,\n\t\"type\" varchar,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"user_id\" text\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"embeddings\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"chunk_id\" uuid,\n\t\"embeddings\" vector(1024),\n\t\"model\" text,\n\t\"user_id\" text\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"unstructured_chunks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"text\" text,\n\t\"metadata\" jsonb,\n\t\"index\" integer,\n\t\"type\" varchar,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"parent_id\" varchar,\n\t\"composite_id\" uuid,\n\t\"user_id\" text,\n\t\"file_id\" varchar\n);\n", + "\nALTER TABLE \"files_to_messages\" RENAME TO \"messages_files\";", + "\nDROP TABLE \"files_to_agents\";", + "\nALTER TABLE \"files\" ADD COLUMN \"file_hash\" varchar(64);", + "\nALTER TABLE \"files\" ADD COLUMN \"chunk_task_id\" uuid;", + "\nALTER TABLE \"files\" ADD COLUMN \"embedding_task_id\" uuid;", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"async_tasks\" ADD CONSTRAINT \"async_tasks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"file_chunks\" ADD CONSTRAINT \"file_chunks_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"file_chunks\" ADD CONSTRAINT \"file_chunks_chunk_id_chunks_id_fk\" 
FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_base_files\" ADD CONSTRAINT \"knowledge_base_files_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_base_files\" ADD CONSTRAINT \"knowledge_base_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_bases\" ADD CONSTRAINT \"knowledge_bases_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_chunks\" ADD CONSTRAINT \"message_chunks_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_chunks\" ADD CONSTRAINT \"message_chunks_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_queries\" ADD CONSTRAINT \"message_queries_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_queries\" ADD CONSTRAINT \"message_queries_embeddings_id_embeddings_id_fk\" FOREIGN KEY (\"embeddings_id\") REFERENCES \"public\".\"embeddings\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_query_id_message_queries_id_fk\" FOREIGN KEY (\"query_id\") REFERENCES \"public\".\"message_queries\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages_files\" ADD CONSTRAINT \"messages_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages_files\" ADD CONSTRAINT \"messages_files_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN 
null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"chunks\" ADD CONSTRAINT \"chunks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_composite_id_chunks_id_fk\" FOREIGN KEY (\"composite_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_file_hash_global_files_hash_id_fk\" FOREIGN KEY (\"file_hash\") REFERENCES \"public\".\"global_files\"(\"hash_id\") ON DELETE no action ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_chunk_task_id_async_tasks_id_fk\" FOREIGN KEY (\"chunk_task_id\") REFERENCES \"public\".\"async_tasks\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_embedding_task_id_async_tasks_id_fk\" FOREIGN KEY (\"embedding_task_id\") REFERENCES \"public\".\"async_tasks\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n" + ], + "bps": true, + "folderMillis": 1724089032064, + "hash": "bc4e36664868d14888b9e9aef180b3e02c563fa3c253111787e68b8ea4cd995f" + }, + { + "sql": [ + "-- step 1: create a temporary table to store the rows we want to keep\nCREATE TEMP TABLE embeddings_temp AS\nSELECT DISTINCT ON (chunk_id) *\nFROM embeddings\nORDER BY chunk_id, random();\n", + "\n\n-- step 2: delete all rows from the original table\nDELETE FROM embeddings;\n", + "\n\n-- step 3: insert the rows we want to keep back into the original table\nINSERT INTO embeddings\nSELECT * FROM embeddings_temp;\n", + "\n\n-- step 4: drop the temporary table\nDROP TABLE embeddings_temp;\n", + "\n\n-- step 5: now it's safe to add the unique constraint\nALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_chunk_id_unique\" UNIQUE(\"chunk_id\");\n" + ], + "bps": true, + "folderMillis": 1724254147447, + "hash": "e99840848ffbb33ca4d7ead6158f02b8d12cb4ff5706d4529d7fa586afa4c2a9" + }, + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"rag_eval_dataset_records\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY 
(sequence name \"rag_eval_dataset_records_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"dataset_id\" integer NOT NULL,\n\t\"ideal\" text,\n\t\"question\" text,\n\t\"reference_files\" text[],\n\t\"metadata\" jsonb,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"rag_eval_datasets\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_datasets_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 30000 CACHE 1),\n\t\"description\" text,\n\t\"name\" text NOT NULL,\n\t\"knowledge_base_id\" text,\n\t\"user_id\" text,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"rag_eval_evaluations\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_evaluations_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"name\" text NOT NULL,\n\t\"description\" text,\n\t\"eval_records_url\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"dataset_id\" integer NOT NULL,\n\t\"knowledge_base_id\" text,\n\t\"language_model\" text,\n\t\"embedding_model\" text,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nCREATE TABLE IF NOT EXISTS \"rag_eval_evaluation_records\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_evaluation_records_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"question\" text NOT NULL,\n\t\"answer\" text,\n\t\"context\" text[],\n\t\"ideal\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"language_model\" text,\n\t\"embedding_model\" text,\n\t\"question_embedding_id\" uuid,\n\t\"duration\" integer,\n\t\"dataset_record_id\" integer NOT NULL,\n\t\"evaluation_id\" integer NOT NULL,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_dataset_records\" ADD CONSTRAINT \"rag_eval_dataset_records_dataset_id_rag_eval_datasets_id_fk\" FOREIGN KEY (\"dataset_id\") REFERENCES \"public\".\"rag_eval_datasets\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_dataset_records\" ADD CONSTRAINT \"rag_eval_dataset_records_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_datasets\" ADD CONSTRAINT \"rag_eval_datasets_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_datasets\" ADD CONSTRAINT \"rag_eval_datasets_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_dataset_id_rag_eval_datasets_id_fk\" FOREIGN KEY (\"dataset_id\") REFERENCES \"public\".\"rag_eval_datasets\"(\"id\") ON 
DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_question_embedding_id_embeddings_id_fk\" FOREIGN KEY (\"question_embedding_id\") REFERENCES \"public\".\"embeddings\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_dataset_record_id_rag_eval_dataset_records_id_fk\" FOREIGN KEY (\"dataset_record_id\") REFERENCES \"public\".\"rag_eval_dataset_records\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_evaluation_id_rag_eval_evaluations_id_fk\" FOREIGN KEY (\"evaluation_id\") REFERENCES \"public\".\"rag_eval_evaluations\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n" + ], + "bps": true, + "folderMillis": 1725366565650, + "hash": "9646161fa041354714f823d726af27247bcd6e60fa3be5698c0d69f337a5700b" + }, + { + "sql": [ + "DROP TABLE \"user_budgets\";", + "\nDROP TABLE \"user_subscriptions\";" + ], + "bps": true, + "folderMillis": 1729699958471, + "hash": "7dad43a2a25d1aec82124a4e53f8d82f8505c3073f23606c1dc5d2a4598eacf9" + }, + { + "sql": [ + "DROP TABLE \"agents_tags\" CASCADE;", + "\nDROP TABLE \"market\" CASCADE;", + "\nDROP TABLE \"plugins\" CASCADE;", + "\nDROP TABLE \"plugins_tags\" CASCADE;", + "\nDROP TABLE \"tags\" CASCADE;", + "\nALTER TABLE \"agents\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"agents_files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"agents_knowledge_bases\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"async_tasks\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"global_files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"knowledge_bases\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"messages\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"chunks\" ADD COLUMN \"accessed_at\" timestamp with time zone 
DEFAULT now() NOT NULL;", + "\nALTER TABLE \"unstructured_chunks\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_dataset_records\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_dataset_records\" ADD COLUMN \"updated_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_datasets\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_evaluations\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_evaluation_records\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"rag_eval_evaluation_records\" ADD COLUMN \"updated_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"session_groups\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"sessions\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"topics\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"user_installed_plugins\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;", + "\nALTER TABLE \"users\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;" + ], + "bps": true, + "folderMillis": 1730900133049, + "hash": "a7d801b679e25ef3ffda343366992b2835c089363e9d7c09074336d40e438004" + }, + { + "sql": [ + "ALTER TABLE \"topics\" ADD COLUMN \"history_summary\" text;", + "\nALTER TABLE \"topics\" ADD COLUMN \"metadata\" jsonb;\n" + ], + "bps": true, + "folderMillis": 1731138670427, + "hash": "80c2eae0600190b354e4fd6b619687a66186b992ec687495bb55c6c163a98fa6" + }, + { + "sql": [ + "CREATE TABLE IF NOT EXISTS \"threads\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"title\" text,\n\t\"type\" text NOT NULL,\n\t\"status\" text DEFAULT 'active',\n\t\"topic_id\" text NOT NULL,\n\t\"source_message_id\" text NOT NULL,\n\t\"parent_thread_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"last_active_at\" timestamp with time zone DEFAULT now(),\n\t\"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n", + "\nALTER TABLE \"messages\" ADD COLUMN \"thread_id\" text;", + "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_topic_id_topics_id_fk\" FOREIGN KEY (\"topic_id\") REFERENCES \"public\".\"topics\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_parent_thread_id_threads_id_fk\" FOREIGN KEY (\"parent_thread_id\") REFERENCES \"public\".\"threads\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n", + "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_thread_id_threads_id_fk\" FOREIGN KEY (\"thread_id\") REFERENCES \"public\".\"threads\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n" + ], + 
"bps": true, + "folderMillis": 1731858381716, + "hash": "d8263bfefe296ed366379c7b7fc65195d12e6a1c0a9f1c96097ea28f2123fe50" + } +] \ No newline at end of file diff --git a/src/features/InitClientDB/EnableModal.tsx b/src/features/InitClientDB/EnableModal.tsx new file mode 100644 index 000000000000..967084de7381 --- /dev/null +++ b/src/features/InitClientDB/EnableModal.tsx @@ -0,0 +1,111 @@ +import { Icon } from '@lobehub/ui'; +import { Button } from 'antd'; +import { createStyles } from 'antd-style'; +import { CpuIcon, LibraryBig, ShieldCheck } from 'lucide-react'; +import { memo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Center, Flexbox } from 'react-layout-kit'; + +import DataStyleModal from '@/components/DataStyleModal'; +import { useGlobalStore } from '@/store/global'; + +import { PGliteSVG } from './PGliteSVG'; + +const useStyles = createStyles(({ css, token, isDarkMode, responsive }) => ({ + desc: css` + width: 280px; + color: ${token.colorTextSecondary}; + + ${responsive.mobile} { + line-height: ${token.lineHeight}; + } + `, + hint: css` + font-size: ${token.fontSizeSM}px; + color: ${token.colorTextTertiary}; + text-align: center; + `, + icon: css` + color: ${isDarkMode ? token.blue : token.geekblue}; + `, + iconCtn: css` + width: 72px; + height: 72px; + background: ${isDarkMode ? token.blue1 : token.geekblue1}; + border-radius: 50%; + `, + intro: css` + ${responsive.mobile} { + width: 350px; + margin-block-start: 24px; + line-height: ${token.lineHeight}; + } + `, + + title: css` + margin-block-end: 0; + font-size: ${token.fontSizeLG}px; + font-weight: bold; + `, +})); + +interface EnableClientDBModalProps { + open: boolean; +} + +const EnableClientDBModal = memo(({ open }) => { + const { t } = useTranslation('common'); + const { styles } = useStyles(); + const markPgliteEnabled = useGlobalStore((s) => s.markPgliteEnabled); + const features = [ + { + avatar: PGliteSVG, + desc: t('clientDB.modal.features.pglite.desc'), + title: t('clientDB.modal.features.pglite.title'), + }, + { + avatar: ShieldCheck, + desc: t('clientDB.modal.features.localFirst.desc'), + title: t('clientDB.modal.features.localFirst.title'), + }, + { + avatar: LibraryBig, + desc: t('clientDB.modal.features.knowledgeBase.desc'), + title: t('clientDB.modal.features.knowledgeBase.title'), + }, + ]; + + return ( + + + + + {t('clientDB.modal.desc')} + + + + {features.map((item) => ( + + + + + + {item.title} + {item.desc} + + + ))} + + + + + {t('clientDB.modal.enable')} + + + + + + ); +}); + +export default EnableClientDBModal; diff --git a/src/features/InitClientDB/ErrorResult.tsx b/src/features/InitClientDB/ErrorResult.tsx new file mode 100644 index 000000000000..4ddc2bc2b80e --- /dev/null +++ b/src/features/InitClientDB/ErrorResult.tsx @@ -0,0 +1,125 @@ +import { Highlighter, Icon, Modal } from '@lobehub/ui'; +import { Button } from 'antd'; +import { createStyles } from 'antd-style'; +import isEqual from 'fast-deep-equal'; +import { TriangleAlert } from 'lucide-react'; +import Link from 'next/link'; +import { ReactNode, memo, useState } from 'react'; +import { Trans, useTranslation } from 'react-i18next'; +import { Center, Flexbox } from 'react-layout-kit'; +import Balancer from 'react-wrap-balancer'; + +import { GITHUB_ISSUES } from '@/const/url'; +import { githubService } from '@/services/github'; +import { useGlobalStore } from '@/store/global'; + +const useStyles = createStyles(({ css, token }) => ({ + bg: css` + cursor: pointer; + + padding-block: 8px; + padding-inline: 24px; + + 
background: ${token.red6}; + border-radius: 40px; + + transition: transform 0.2s; + + :hover { + transform: scale(1.05); + } + + :active { + transform: scale(1); + } + `, + + text: css` + font-size: 15px; + color: ${token.colorText}; + `, +})); + +interface MigrationError { + message: string; + stack: string; +} + +interface FailedModalProps { + children?: (props: { setOpen: (open: boolean) => void }) => ReactNode; + error?: MigrationError; +} + +const ErrorResult = memo(({ children }) => { + const { t } = useTranslation('common'); + const { styles } = useStyles(); + const initializeClientDB = useGlobalStore((s) => s.initializeClientDB); + const error = useGlobalStore((s) => s.initClientDBError, isEqual); + const [open, setOpen] = useState(false); + + return ( + <> + {children ? ( + children({ setOpen }) + ) : ( + { + setOpen(true); + }} + > + + + + {t('clientDB.initing.error')} + + )} + { + setOpen(false); + }} + onClose={() => { + setOpen(false); + }} + open={open} + title={t('clientDB.error.title')} + > + + + + 非常抱歉,Pglite 数据库初始化过程中发生异常。请尝试重试,或 + { + e.preventDefault(); + githubService.submitPgliteInitError(error); + }} + target="_blank" + > + 提交问题 + + 我们将会第一时间帮你排查问题。 + + + initializeClientDB()} size={'large'} type={'primary'}> + {t('clientDB.error.retry')} + + + + {t('clientDB.error.detail', { message: error?.message, type: error?.name })} + + {JSON.stringify(error, null, 2)} + + + + + > + ); +}); + +export default ErrorResult; diff --git a/src/features/InitClientDB/InitIndicator.tsx b/src/features/InitClientDB/InitIndicator.tsx new file mode 100644 index 000000000000..a7a3eac8c744 --- /dev/null +++ b/src/features/InitClientDB/InitIndicator.tsx @@ -0,0 +1,124 @@ +'use client'; + +import { Progress } from 'antd'; +import { createStyles } from 'antd-style'; +import { AnimatePresence, motion } from 'framer-motion'; +import { rgba } from 'polished'; +import { memo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { Center, Flexbox } from 'react-layout-kit'; + +import { useGlobalStore } from '@/store/global'; +import { ClientDatabaseInitStages, DatabaseLoadingState } from '@/types/clientDB'; + +import ErrorResult from './ErrorResult'; + +const useStyles = createStyles(({ css, token, prefixCls }) => ({ + bg: css` + padding-block: 8px; + padding-inline: 8px 32px; + background: ${token.colorText}; + border-radius: 40px; + `, + container: css` + position: fixed; + z-index: 1000; + `, + progress: css` + .${prefixCls}-progress-text { + font-size: 12px; + color: ${token.colorBgContainer} !important; + } + `, + progressReady: css` + .${prefixCls}-progress-text { + color: ${token.colorSuccessBorder} !important; + } + `, + + text: css` + font-size: 15px; + color: ${token.colorBgContainer}; + `, +})); + +interface InitClientDBProps { + bottom?: number; + show: boolean; +} + +const InitClientDB = memo(({ bottom = 80, show }) => { + const { styles, theme, cx } = useStyles(); + const currentStage = useGlobalStore((s) => s.initClientDBStage || DatabaseLoadingState.Idle); + const { t } = useTranslation('common'); + const useInitClientDB = useGlobalStore((s) => s.useInitClientDB); + + useInitClientDB(); + + const getStateMessage = (state: DatabaseLoadingState) => { + switch (state) { + case DatabaseLoadingState.Finished: + case DatabaseLoadingState.Ready: { + return t('clientDB.initing.ready'); + } + + case DatabaseLoadingState.Idle: { + return t('clientDB.initing.idle'); + } + case DatabaseLoadingState.Initializing: { + return t('clientDB.initing.initializing'); + } + case 
DatabaseLoadingState.LoadingDependencies: { + return t('clientDB.initing.loadingDependencies'); + } + + case DatabaseLoadingState.LoadingWasm: { + return t('clientDB.initing.loadingWasmModule'); + } + + case DatabaseLoadingState.Migrating: { + return t('clientDB.initing.migrating'); + } + } + }; + + const currentStageIndex = ClientDatabaseInitStages.indexOf(currentStage); + const isReady = currentStage === DatabaseLoadingState.Finished; + const isError = currentStage === DatabaseLoadingState.Error; + return ( + + {show && ( + + + {isError ? ( + + ) : ( + + percent} + percent={parseInt( + ((currentStageIndex / (ClientDatabaseInitStages.length - 1)) * 100).toFixed(0), + )} + size={40} + strokeColor={isReady ? theme.colorSuccessActive : theme.colorBgContainer} + strokeLinecap={'round'} + strokeWidth={10} + trailColor={rgba(theme.colorBgContainer, 0.1)} + type={'circle'} + /> + {getStateMessage(currentStage)} + + )} + + + )} + + ); +}); +export default InitClientDB; diff --git a/src/features/InitClientDB/PGliteSVG.tsx b/src/features/InitClientDB/PGliteSVG.tsx new file mode 100644 index 000000000000..870ee143cbe5 --- /dev/null +++ b/src/features/InitClientDB/PGliteSVG.tsx @@ -0,0 +1,22 @@ +import { LucideIcon, LucideProps } from 'lucide-react'; +import { forwardRef } from 'react'; + +// @ts-expect-error 类型感觉不对,未来修正 +export const PGliteSVG: LucideIcon = forwardRef>>( + ({ size }, ref) => ( + + + + ), +); diff --git a/src/features/InitClientDB/index.tsx b/src/features/InitClientDB/index.tsx new file mode 100644 index 000000000000..19064e475cfc --- /dev/null +++ b/src/features/InitClientDB/index.tsx @@ -0,0 +1,37 @@ +'use client'; + +import { Spin } from 'antd'; +import dynamic from 'next/dynamic'; +import { memo } from 'react'; + +import { useGlobalStore } from '@/store/global'; +import { systemStatusSelectors } from '@/store/global/selectors'; + +const Modal = dynamic(() => import('./EnableModal'), { + loading: () => , + ssr: false, +}); + +const InitIndicator = dynamic(() => import('./InitIndicator'), { + ssr: false, +}); + +interface InitClientDBProps { + bottom?: number; +} + +const InitClientDB = memo(({ bottom }) => { + const isPgliteNotEnabled = useGlobalStore(systemStatusSelectors.isPgliteNotEnabled); + const isPgliteNotInited = useGlobalStore(systemStatusSelectors.isPgliteNotInited); + + return ( + <> + {/* 当用户没有设置启用 pglite 时,强弹窗引导用户来开启弹窗 */} + {isPgliteNotEnabled && } + {/* 当用户已经启用 pglite 但没有初始化时,展示初始化指示器 */} + {isPgliteNotInited && } + > + ); +}); + +export default InitClientDB; diff --git a/src/hooks/useCheckPluginsIsInstalled.ts b/src/hooks/useCheckPluginsIsInstalled.ts index f46273daecc3..a40470d75da2 100644 --- a/src/hooks/useCheckPluginsIsInstalled.ts +++ b/src/hooks/useCheckPluginsIsInstalled.ts @@ -3,8 +3,8 @@ import { systemStatusSelectors } from '@/store/global/selectors'; import { useToolStore } from '@/store/tool'; export const useCheckPluginsIsInstalled = (plugins: string[]) => { - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); const checkPluginsIsInstalled = useToolStore((s) => s.useCheckPluginsIsInstalled); - checkPluginsIsInstalled(isPgliteInited, plugins); + checkPluginsIsInstalled(isDBInited, plugins); }; diff --git a/src/hooks/useFetchInstalledPlugins.ts b/src/hooks/useFetchInstalledPlugins.ts index 49812840daa5..ee0f78dd5f03 100644 --- a/src/hooks/useFetchInstalledPlugins.ts +++ b/src/hooks/useFetchInstalledPlugins.ts @@ -3,8 +3,8 @@ import { systemStatusSelectors } 
from '@/store/global/selectors'; import { useToolStore } from '@/store/tool'; export const useFetchInstalledPlugins = () => { - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); const [useFetchInstalledPlugins] = useToolStore((s) => [s.useFetchInstalledPlugins]); - return useFetchInstalledPlugins(isPgliteInited); + return useFetchInstalledPlugins(isDBInited); }; diff --git a/src/hooks/useFetchMessages.ts b/src/hooks/useFetchMessages.ts index d5ff375ad7b5..617ff61f4f23 100644 --- a/src/hooks/useFetchMessages.ts +++ b/src/hooks/useFetchMessages.ts @@ -4,12 +4,12 @@ import { systemStatusSelectors } from '@/store/global/selectors'; import { useSessionStore } from '@/store/session'; export const useFetchMessages = () => { - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); const [sessionId] = useSessionStore((s) => [s.activeId]); const [activeTopicId, useFetchMessages] = useChatStore((s) => [ s.activeTopicId, s.useFetchMessages, ]); - useFetchMessages(isPgliteInited, sessionId, activeTopicId); + useFetchMessages(isDBInited, sessionId, activeTopicId); }; diff --git a/src/hooks/useFetchSessions.ts b/src/hooks/useFetchSessions.ts index b8efaba05504..d9a8356152c8 100644 --- a/src/hooks/useFetchSessions.ts +++ b/src/hooks/useFetchSessions.ts @@ -5,9 +5,9 @@ import { useUserStore } from '@/store/user'; import { authSelectors } from '@/store/user/slices/auth/selectors'; export const useFetchSessions = () => { - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); const isLogin = useUserStore(authSelectors.isLogin); const useFetchSessions = useSessionStore((s) => s.useFetchSessions); - useFetchSessions(isPgliteInited, isLogin); + useFetchSessions(isDBInited, isLogin); }; diff --git a/src/hooks/useFetchThreads.ts b/src/hooks/useFetchThreads.ts index 798f4d67e142..c4a21a7bc454 100644 --- a/src/hooks/useFetchThreads.ts +++ b/src/hooks/useFetchThreads.ts @@ -3,9 +3,9 @@ import { useGlobalStore } from '@/store/global'; import { systemStatusSelectors } from '@/store/global/selectors'; export const useFetchThreads = (activeTopicId?: string) => { - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); const [useFetchThreads] = useChatStore((s) => [s.useFetchThreads]); - useFetchThreads(isPgliteInited, activeTopicId); + useFetchThreads(isDBInited, activeTopicId); }; diff --git a/src/hooks/useFetchTopics.ts b/src/hooks/useFetchTopics.ts index 5d7b0cddc07d..41b97ca19619 100644 --- a/src/hooks/useFetchTopics.ts +++ b/src/hooks/useFetchTopics.ts @@ -10,8 +10,8 @@ import { useSessionStore } from '@/store/session'; export const useFetchTopics = () => { const [sessionId] = useSessionStore((s) => [s.activeId]); const [activeTopicId, useFetchTopics] = useChatStore((s) => [s.activeTopicId, s.useFetchTopics]); - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); - useFetchTopics(isPgliteInited, sessionId); + useFetchTopics(isDBInited, sessionId); useFetchThreads(activeTopicId); }; diff --git a/src/layout/GlobalProvider/StoreInitialization.tsx b/src/layout/GlobalProvider/StoreInitialization.tsx index 5f5b7f7fdab8..0291e41f28a6 100644 --- 
a/src/layout/GlobalProvider/StoreInitialization.tsx +++ b/src/layout/GlobalProvider/StoreInitialization.tsx @@ -51,8 +51,8 @@ const StoreInitialization = memo(() => { * But during initialization, the value of `enableAuth` might be incorrect cause of the async fetch. * So we need to use `isSignedIn` only to determine whether request for the default agent config and user state. */ - const isPgliteInited = useGlobalStore(systemStatusSelectors.isPgliteInited); - const isLoginOnInit = isPgliteInited && (enableNextAuth ? isSignedIn : isLogin); + const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited); + const isLoginOnInit = isDBInited && (enableNextAuth ? isSignedIn : isLogin); // init inbox agent and default agent config useInitAgentStore(isLoginOnInit, serverConfig.defaultAgent?.config); diff --git a/src/services/baseClientService/index.ts b/src/services/baseClientService/index.ts new file mode 100644 index 000000000000..15c2350b234d --- /dev/null +++ b/src/services/baseClientService/index.ts @@ -0,0 +1,9 @@ +const FALLBACK_CLIENT_DB_USER_ID = 'DEFAULT_LOBE_CHAT_USER'; + +export class BaseClientService { + protected readonly userId: string; + + constructor(userId?: string) { + this.userId = userId || FALLBACK_CLIENT_DB_USER_ID; + } +} diff --git a/src/services/debug.ts b/src/services/debug.ts index 1584e5f8de3e..51ce6d59bcb4 100644 --- a/src/services/debug.ts +++ b/src/services/debug.ts @@ -1,39 +1,37 @@ -import { DEBUG_MODEL } from '@/database/_deprecated/models/__DEBUG'; - class DebugService { async insertLargeDataToDB() { - await DEBUG_MODEL.createRandomData({ - messageCount: 100_000, - sessionCount: 40, - startIndex: 0, - topicCount: 200, - }); - - console.log('已插入10w'); - - await DEBUG_MODEL.createRandomData({ - messageCount: 300_000, - sessionCount: 40, - startIndex: 100_001, - topicCount: 200, - }); - console.log('已插入40w'); - - await DEBUG_MODEL.createRandomData({ - messageCount: 300_000, - sessionCount: 40, - startIndex: 400_001, - topicCount: 200, - }); - console.log('已插入70w'); - - await DEBUG_MODEL.createRandomData({ - messageCount: 300_000, - sessionCount: 40, - startIndex: 700_001, - topicCount: 200, - }); - console.log('已插入100w'); + // await DEBUG_MODEL.createRandomData({ + // messageCount: 100_000, + // sessionCount: 40, + // startIndex: 0, + // topicCount: 200, + // }); + // + // console.log('已插入10w'); + // + // await DEBUG_MODEL.createRandomData({ + // messageCount: 300_000, + // sessionCount: 40, + // startIndex: 100_001, + // topicCount: 200, + // }); + // console.log('已插入40w'); + // + // await DEBUG_MODEL.createRandomData({ + // messageCount: 300_000, + // sessionCount: 40, + // startIndex: 400_001, + // topicCount: 200, + // }); + // console.log('已插入70w'); + // + // await DEBUG_MODEL.createRandomData({ + // messageCount: 300_000, + // sessionCount: 40, + // startIndex: 700_001, + // topicCount: 200, + // }); + // console.log('已插入100w'); } } diff --git a/src/services/file/index.ts b/src/services/file/index.ts index 72bb54f91872..c148ef7974e1 100644 --- a/src/services/file/index.ts +++ b/src/services/file/index.ts @@ -1,5 +1,9 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const fileService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? 
new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : clientService; diff --git a/src/services/file/pglite.test.ts b/src/services/file/pglite.test.ts new file mode 100644 index 000000000000..5166f6d3138a --- /dev/null +++ b/src/services/file/pglite.test.ts @@ -0,0 +1,198 @@ +import { eq } from 'drizzle-orm'; +import { beforeEach, describe, expect, it } from 'vitest'; + +import { clientDB, initializeDB } from '@/database/client/db'; +import { files, globalFiles, users } from '@/database/schemas'; +import { clientS3Storage } from '@/services/file/ClientS3'; +import { UploadFileParams } from '@/types/files'; + +import { ClientService } from './pglite'; + +const userId = 'file-user'; + +const fileService = new ClientService(userId); + +const mockFile = { + name: 'mock.png', + fileType: 'image/png', + size: 1, + url: '', +}; + +beforeEach(async () => { + await initializeDB(); + + await clientDB.delete(users); + await clientDB.delete(globalFiles); + // create test data + await clientDB.transaction(async (tx) => { + await tx.insert(users).values({ id: userId }); + }); +}); + +describe('FileService', () => { + describe('createFile', () => { + it('createFile should save the file to the database', async () => { + const localFile: UploadFileParams = { + name: 'test', + fileType: 'image/png', + url: '', + size: 1, + hash: '123', + }; + + await clientS3Storage.putObject( + '123', + new File([new ArrayBuffer(1)], 'test.png', { type: 'image/png' }), + ); + + const result = await fileService.createFile(localFile); + + expect(result).toMatchObject({ url: 'data:image/png;base64,AA==' }); + }); + + it('should throw error when file is not found in storage during base64 conversion', async () => { + const localFile: UploadFileParams = { + name: 'test', + fileType: 'image/png', + url: '', + size: 1, + hash: 'non-existing-hash', + }; + + // do not call clientS3Storage.putObject here, to simulate a missing file + + const promise = fileService.createFile(localFile); + + await expect(promise).rejects.toThrow('file not found'); + }); + }); + + it('removeFile should delete the file from the database', async () => { + const fileId = '1'; + await clientDB.insert(files).values({ id: fileId, userId, ...mockFile }); + + await fileService.removeFile(fileId); + + const result = await clientDB.query.files.findFirst({ + where: eq(files.id, fileId), + }); + + expect(result).toBeUndefined(); + }); + + describe('getFile', () => { + it('should retrieve and convert local file info to FilePreview', async () => { + const fileId = 'rwlijweled'; + const file = { + fileType: 'image/png', + size: 1, + name: 'test.png', + url: 'idb://12312/abc.png', + hashId: '123tttt', + }; + + await clientDB.insert(globalFiles).values(file); + + await clientDB.insert(files).values({ + id: fileId, + userId, + ...file, + createdAt: new Date(1), + updatedAt: new Date(2), + fileHash: file.hashId, + }); + + await clientS3Storage.putObject( + file.hashId, + new File([new ArrayBuffer(1)], file.name, { type: file.fileType }), + ); + + const result = await fileService.getFile(fileId); + + expect(result).toMatchObject({ + createdAt: new Date(1), + id: 'rwlijweled', + size: 1, + type: 'image/png', + name: 'test.png', + updatedAt: new Date(2), + }); + }); + + it('should throw an error when the file is not found', async () => { + const fileId = 'non-existent'; + + const getFilePromise = fileService.getFile(fileId); + + await expect(getFilePromise).rejects.toThrow('file not found'); + }); + }); + + describe('removeFiles', () => { + it('should 
delete multiple files from the database', async () => { + const fileIds = ['1', '2', '3']; + + // insert test file data + await Promise.all( + fileIds.map((id) => clientDB.insert(files).values({ id, userId, ...mockFile })), + ); + + await fileService.removeFiles(fileIds); + + // verify that all files have been deleted + const remainingFiles = await clientDB.query.files.findMany({ + where: (fields, { inArray }) => inArray(fields.id, fileIds), + }); + + expect(remainingFiles).toHaveLength(0); + }); + }); + + describe('removeAllFiles', () => { + it('should clear all files for the user', async () => { + // insert test file data + await Promise.all([ + clientDB.insert(files).values({ id: '1', userId, ...mockFile }), + clientDB.insert(files).values({ id: '2', userId, ...mockFile }), + ]); + + await fileService.removeAllFiles(); + + // verify that all of the user's files have been deleted + const remainingFiles = await clientDB.query.files.findMany({ + where: eq(files.userId, userId), + }); + + expect(remainingFiles).toHaveLength(0); + }); + }); + + describe('checkFileHash', () => { + it('should return true if file hash exists', async () => { + const hash = 'existing-hash'; + await clientDB.insert(globalFiles).values({ + ...mockFile, + hashId: hash, + }); + await clientDB.insert(files).values({ + id: '1', + userId, + ...mockFile, + fileHash: hash, + }); + + const exists = await fileService.checkFileHash(hash); + + expect(exists).toMatchObject({ isExist: true }); + }); + + it('should return false if file hash does not exist', async () => { + const hash = 'non-existing-hash'; + + const exists = await fileService.checkFileHash(hash); + + expect(exists).toEqual({ isExist: false }); + }); + }); +}); diff --git a/src/services/file/pglite.ts b/src/services/file/pglite.ts new file mode 100644 index 000000000000..b6c2818b6c39 --- /dev/null +++ b/src/services/file/pglite.ts @@ -0,0 +1,84 @@ +import { clientDB } from '@/database/client/db'; +import { FileModel } from '@/database/server/models/file'; +import { BaseClientService } from '@/services/baseClientService'; +import { clientS3Storage } from '@/services/file/ClientS3'; +import { FileItem, UploadFileParams } from '@/types/files'; + +import { IFileService } from './type'; + +export class ClientService extends BaseClientService implements IFileService { + private get fileModel(): FileModel { + return new FileModel(clientDB as any, this.userId); + } + + async createFile(file: UploadFileParams) { + // save to local storage + // we may want to save to a remote server later + const res = await this.fileModel.create( + { + fileHash: file.hash, + fileType: file.fileType, + knowledgeBaseId: file.knowledgeBaseId, + metadata: file.metadata, + name: file.name, + size: file.size, + url: file.url!, + }, + true, + ); + + // get file to base64 url + const base64 = await this.getBase64ByFileHash(file.hash!); + + return { + id: res.id, + url: `data:${file.fileType};base64,${base64}`, + }; + } + + async getFile(id: string): Promise { + const item = await this.fileModel.findById(id); + if (!item) { + throw new Error('file not found'); + } + + // arrayBuffer to url + const fileItem = await clientS3Storage.getObject(item.fileHash!); + if (!fileItem) throw new Error('file not found'); + + const url = URL.createObjectURL(fileItem); + + return { + createdAt: new Date(item.createdAt), + id, + name: item.name, + size: item.size, + type: item.fileType, + updatedAt: new Date(item.updatedAt), + url, + }; + } + + async removeFile(id: string) { + await this.fileModel.delete(id, false); + } + + async removeFiles(ids: string[]) { + await this.fileModel.deleteMany(ids, false); 
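  // Editor's note — illustration only, not part of this PR's diff: callers are expected to go
  // through the `fileService` singleton from src/services/file/index.ts, which (per the factory
  // shown earlier in this diff) resolves to this pglite-backed ClientService when
  // NEXT_PUBLIC_CLIENT_DB === 'pglite' and NEXT_PUBLIC_SERVICE_MODE !== 'server'.
  // A minimal usage sketch; the `upload` helper and the '@/services/file' import path are
  // assumptions for illustration, while the putObject/createFile/checkFileHash calls mirror
  // the tests above:
  //
  //   import { fileService } from '@/services/file';
  //   import { clientS3Storage } from '@/services/file/ClientS3';
  //
  //   const upload = async (file: File, hash: string) => {
  //     // store the raw blob in browser-side object storage, keyed by its content hash
  //     await clientS3Storage.putObject(hash, file);
  //     // record the file row; createFile reads the blob back and returns a base64 data URL
  //     return fileService.createFile({
  //       fileType: file.type, hash, name: file.name, size: file.size, url: '',
  //     });
  //   };
  //
  //   // fileService.checkFileHash(hash) resolves to { isExist } and can be used for deduplication.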
} + + async removeAllFiles() { + return this.fileModel.clear(); + } + + async checkFileHash(hash: string) { + return this.fileModel.checkHash(hash); + } + + private async getBase64ByFileHash(hash: string) { + const fileItem = await clientS3Storage.getObject(hash); + if (!fileItem) throw new Error('file not found'); + + return Buffer.from(await fileItem.arrayBuffer()).toString('base64'); + } +} diff --git a/src/services/file/type.ts b/src/services/file/type.ts index 6ed6a090d257..5dc003cbeaa0 100644 --- a/src/services/file/type.ts +++ b/src/services/file/type.ts @@ -1,12 +1,13 @@ -import { FileItem, UploadFileParams } from '@/types/files'; +import { CheckFileHashResult, FileItem, UploadFileParams } from '@/types/files'; export interface IFileService { + checkFileHash(hash: string): Promise; createFile( file: UploadFileParams, knowledgeBaseId?: string, ): Promise<{ id: string; url: string }>; getFile(id: string): Promise; removeAllFiles(): Promise; - removeFile(id: string): Promise; - removeFiles(ids: string[]): Promise; + removeFile(id: string): Promise; + removeFiles(ids: string[]): Promise; } diff --git a/src/services/github.ts b/src/services/github.ts index e00a9a88611e..0fa91682436f 100644 --- a/src/services/github.ts +++ b/src/services/github.ts @@ -37,6 +37,23 @@ class GitHubService { window.open(url, '_blank'); }; + + submitPgliteInitError = (error?: { message: string }) => { + const body = ['```json', JSON.stringify(error, null, 2), '```'].join('\n'); + + const message = error?.message || ''; + + const url = qs.stringifyUrl({ + query: { + body, + labels: '❌ Database Init Error', + title: `[Database Init Error] ${message}`, + }, + url: urlJoin(GITHUB, '/issues/new'), + }); + + window.open(url, '_blank'); + }; } export const githubService = new GitHubService(); diff --git a/src/services/import/index.ts b/src/services/import/index.ts index 4f2ab75bfea8..5519bcc434fb 100644 --- a/src/services/import/index.ts +++ b/src/services/import/index.ts @@ -1,5 +1,9 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const importService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? 
new ServerService() : clientService; diff --git a/src/services/import/pglite.test.ts b/src/services/import/pglite.test.ts new file mode 100644 index 000000000000..25d526b78a93 --- /dev/null +++ b/src/services/import/pglite.test.ts @@ -0,0 +1,997 @@ +import { eq, inArray } from 'drizzle-orm'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +import { clientDB, initializeDB } from '@/database/client/db'; +import mockImportData from '@/database/repositories/dataImporter/__tests__/fixtures/messages.json'; +import { + agents, + agentsToSessions, + messages, + sessionGroups, + sessions, + topics, + users, +} from '@/database/schemas'; +import { CURRENT_CONFIG_VERSION } from '@/migrations'; +import { ImportResults, ImporterEntryData } from '@/types/importer'; + +import { ClientService } from './pglite'; + +const userId = 'test-user-id'; +const service = new ClientService(userId); + +beforeEach(async () => { + await initializeDB(); + + await clientDB.delete(users); + + // 创建测试数据 + await clientDB.transaction(async (tx) => { + await tx.insert(users).values({ id: userId }); + }); +}); + +describe('ImporterService', () => { + describe('import sessionGroups', () => { + it('should import session groups and return correct result', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessionGroups: [ + { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 }, + { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.sessionGroups!.added).toBe(2); + expect(result!.sessionGroups!.skips).toBe(0); + expect(result!.sessionGroups!.errors).toBe(0); + + const groups = await clientDB.query.sessionGroups.findMany({ + where: eq(sessionGroups.userId, userId), + }); + expect(groups).toHaveLength(2); + }); + + it('should skip existing session groups and return correct result', async () => { + await clientDB + .insert(sessionGroups) + .values({ clientId: 'group1', name: 'Existing Group', userId }) + .execute(); + + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessionGroups: [ + { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 }, + { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.sessionGroups!.added).toBe(1); + expect(result!.sessionGroups!.skips).toBe(1); + expect(result!.sessionGroups!.errors).toBe(0); + }); + }); + + describe('import sessions', () => { + it('should import sessions and return correct result', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + { + id: 'session2', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 2', + }, + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = 
res; + }, + }); + + expect(result!.sessions!.added).toBe(2); + expect(result!.sessions!.skips).toBe(0); + expect(result!.sessions!.errors).toBe(0); + + const importedSessions = await clientDB.query.sessions.findMany({ + where: eq(sessions.userId, userId), + }); + expect(importedSessions).toHaveLength(2); + + const agentCount = await clientDB.query.agents.findMany({ + where: eq(agents.userId, userId), + }); + + expect(agentCount.length).toBe(2); + + const agentSessionCount = await clientDB.query.agentsToSessions.findMany(); + expect(agentSessionCount.length).toBe(2); + }); + + it('should skip existing sessions and return correct result', async () => { + await clientDB.insert(sessions).values({ clientId: 'session1', userId }).execute(); + + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + { + id: 'session2', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 2', + }, + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.sessions!.added).toBe(1); + expect(result!.sessions!.skips).toBe(1); + expect(result!.sessions!.errors).toBe(0); + }); + + it('should associate imported sessions with session groups', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessionGroups: [ + { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 }, + { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 }, + ], + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + group: 'group1', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + { + id: 'session2', + group: 'group2', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 2', + }, + }, + { + id: 'session3', + group: 'group4', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 3', + }, + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.sessionGroups!.added).toBe(2); + expect(result!.sessionGroups!.skips).toBe(0); + + expect(result!.sessions!.added).toBe(3); + expect(result!.sessions!.skips).toBe(0); + + // session 1 should be associated with group 1 + const session1 = await clientDB.query.sessions.findFirst({ + where: eq(sessions.clientId, 'session1'), + with: { group: true }, + }); + expect(session1?.group).toBeDefined(); + + // session 3 should not have group + const session3 = await clientDB.query.sessions.findFirst({ + where: eq(sessions.clientId, 'session3'), + with: { group: true }, + }); + 
expect(session3?.group).toBeNull(); + }); + + it('should create agents and associate them with imported sessions', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'Test Agent 1', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + { + id: 'session2', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'def', + chatConfig: {} as any, + params: {}, + systemRole: 'Test Agent 2', + tts: {} as any, + }, + meta: { + title: 'Session 2', + }, + }, + ], + }; + + await service.importData(data); + + // verify that a corresponding agent was created for each session + const agentCount = await clientDB.query.agents.findMany({ + where: eq(agents.userId, userId), + }); + expect(agentCount).toHaveLength(2); + + // verify that the agent properties were set correctly + const agent1 = await clientDB.query.agents.findFirst({ + where: eq(agents.systemRole, 'Test Agent 1'), + }); + expect(agent1?.model).toBe('abc'); + + const agent2 = await clientDB.query.agents.findFirst({ + where: eq(agents.systemRole, 'Test Agent 2'), + }); + expect(agent2?.model).toBe('def'); + + // verify that the agentsToSessions associations were created correctly + const session1 = await clientDB.query.sessions.findFirst({ + where: eq(sessions.clientId, 'session1'), + }); + const session1Agent = await clientDB.query.agentsToSessions.findFirst({ + where: eq(agentsToSessions.sessionId, session1?.id!), + with: { agent: true }, + }); + + expect((session1Agent?.agent as any).systemRole).toBe('Test Agent 1'); + + const session2 = await clientDB.query.sessions.findFirst({ + where: eq(sessions.clientId, 'session2'), + }); + const session2Agent = await clientDB.query.agentsToSessions.findFirst({ + where: eq(agentsToSessions.sessionId, session2?.id!), + with: { agent: true }, + }); + + expect((session2Agent?.agent as any).systemRole).toBe('Test Agent 2'); + }); + + it('should not create duplicate agents for existing sessions', async () => { + // first import some sessions + await service.importData({ + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'Test Agent 1', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + ], + version: CURRENT_CONFIG_VERSION, + }); + + // import the same sessions again + await service.importData({ + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'Test Agent 1', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + ], + version: CURRENT_CONFIG_VERSION, + }); + + // verify that only one agent was created + const agentCount = await clientDB.query.agents.findMany({ + where: eq(agents.userId, userId), + }); + expect(agentCount).toHaveLength(1); + }); + }); + + describe('import topics', () => { + it('should import topics and return correct result', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + topics: [ + { + id: 'topic1', + title: 'Topic 1', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + }, + { + id: 'topic2', + title: 'Topic 2', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session2', + }, + ], + sessions: [ + { + id: 'session1', + createdAt: 
'2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + { + id: 'session2', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 2', + }, + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.topics!.added).toBe(2); + expect(result!.topics!.skips).toBe(0); + expect(result!.topics!.errors).toBe(0); + + const importedTopics = await clientDB.query.topics.findMany({ + where: eq(topics.userId, userId), + }); + expect(importedTopics).toHaveLength(2); + }); + + it('should skip existing topics and return correct result', async () => { + await clientDB + .insert(topics) + .values({ clientId: 'topic1', title: 'Existing Topic', userId }) + .execute(); + + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + topics: [ + { id: 'topic1', title: 'Topic 1', createdAt: 1715186011586, updatedAt: 1715186015053 }, + { id: 'topic2', title: 'Topic 2', createdAt: 1715186011586, updatedAt: 1715186015053 }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.topics!.added).toBe(1); + expect(result!.topics!.skips).toBe(1); + expect(result!.topics!.errors).toBe(0); + }); + + it('should associate imported topics with sessions', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + ], + topics: [ + { + id: 'topic1', + title: 'Topic 1', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + }, + { id: 'topic2', title: 'Topic 2', createdAt: 1715186011586, updatedAt: 1715186015053 }, + ], + }; + + await service.importData(data); + + // topic1 should be associated with session1 + const [topic1] = await clientDB + .select({ sessionClientId: sessions.clientId }) + .from(topics) + .where(eq(topics.clientId, 'topic1')) + .leftJoin(sessions, eq(topics.sessionId, sessions.id)); + + expect(topic1?.sessionClientId).toBe('session1'); + + // topic2 should not have session + const topic2 = await clientDB.query.topics.findFirst({ + where: eq(topics.clientId, 'topic2'), + with: { session: true }, + }); + expect(topic2?.session).toBeNull(); + }); + }); + + describe('import messages', () => { + it('should import messages and return correct result', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + messages: [ + { + id: 'msg1', + content: 'Message 1', + role: 'user', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + topicId: 'topic1', + }, + { + id: 'msg2', + content: 'Message 2', + role: 'assistant', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + topicId: 'topic1', + parentId: 'msg1', + }, + ], + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as 
any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + ], + topics: [ + { + id: 'topic1', + title: 'Topic 1', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.messages!.added).toBe(2); + expect(result!.messages!.skips).toBe(0); + expect(result!.messages!.errors).toBe(0); + + const importedMessages = await clientDB.query.messages.findMany({ + where: eq(messages.userId, userId), + }); + expect(importedMessages).toHaveLength(2); + }); + + it('should skip existing messages and return correct result', async () => { + await clientDB + .insert(messages) + .values({ + clientId: 'msg1', + content: 'Existing Message', + role: 'user', + userId, + }) + .execute(); + + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + messages: [ + { + id: 'msg1', + content: 'Message 1', + role: 'user', + createdAt: 1715186011586, + updatedAt: 1715186015053, + }, + { + id: 'msg2', + content: 'Message 2', + role: 'assistant', + createdAt: 1715186011586, + updatedAt: 1715186015053, + }, + ], + }; + + let result: ImportResults; + await service.importData(data, { + onSuccess: (res) => { + result = res; + }, + }); + + expect(result!.messages!.added).toBe(1); + expect(result!.messages!.skips).toBe(1); + expect(result!.messages!.errors).toBe(0); + }); + + it('should associate imported messages with sessions and topics', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + sessions: [ + { + id: 'session1', + createdAt: '2022-05-14T18:18:10.494Z', + updatedAt: '2023-01-01', + type: 'agent', + config: { + model: 'abc', + chatConfig: {} as any, + params: {}, + systemRole: 'abc', + tts: {} as any, + }, + meta: { + title: 'Session 1', + }, + }, + ], + topics: [ + { + id: 'topic1', + title: 'Topic 1', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + }, + ], + messages: [ + { + id: 'msg1', + content: 'Message 1', + role: 'user', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + topicId: 'topic1', + }, + { + id: 'msg2', + content: 'Message 2', + role: 'assistant', + createdAt: 1715186011586, + updatedAt: 1715186015053, + sessionId: 'session1', + topicId: 'topic1', + parentId: 'msg1', + }, + { + id: 'msg3', + content: 'Message 3', + role: 'user', + createdAt: 1715186011586, + updatedAt: 1715186015053, + }, + ], + }; + + await service.importData(data); + + // msg1 and msg2 should be associated with session1 and topic1 + const [msg1, msg2] = await clientDB.query.messages.findMany({ + where: inArray(messages.clientId, ['msg1', 'msg2']), + with: { + session: true, + topic: true, + }, + }); + + expect(msg1.session?.clientId).toBe('session1'); + expect(msg1.topic?.clientId).toBe('topic1'); + expect(msg2.session?.clientId).toBe('session1'); + expect(msg2.topic?.clientId).toBe('topic1'); + + // msg3 should not have session and topic + const msg3 = await clientDB.query.messages.findFirst({ + where: eq(messages.clientId, 'msg3'), + with: { + session: true, + topic: true, + }, + }); + expect(msg3?.session).toBeNull(); + expect(msg3?.topic).toBeNull(); + }); + + it('should set parentId for messages', async () => { + const data: ImporterEntryData = { + version: CURRENT_CONFIG_VERSION, + messages: [ + { + id: 'msg1', + content: 'Message 1', + role: 'user', + createdAt: 1715186011586, + updatedAt: 
1715186015053, + }, + { + id: 'msg2', + content: 'Message 2', + role: 'assistant', + createdAt: 1715186011586, + updatedAt: 1715186015053, + parentId: 'msg1', + }, + ], + }; + + await service.importData(data); + + const msg2 = await clientDB.query.messages.findFirst({ + where: eq(messages.clientId, 'msg2'), + with: { parent: true }, + }); + + expect(msg2?.parent?.clientId).toBe('msg1'); + }); + + it('should import parentId Success', () => {}); + }); + + describe('real world examples', () => { + it('should import successfully', async () => { + let result: ImportResults; + + await service.importData( + { + messages: [ + { + role: 'user', + content: 'hello', + files: [], + sessionId: 'inbox', + topicId: '2wcF8yaS', + createdAt: 1714236590340, + id: 'DCG1G1EH', + updatedAt: 1714236590340, + extra: {}, + }, + { + role: 'assistant', + content: '...', + parentId: 'DCG1G1EH', + sessionId: 'inbox', + topicId: '2wcF8yaS', + createdAt: 1714236590441, + id: 'gY41w5vQ', + updatedAt: 1714236590518, + error: { + body: { + error: { + message: "model 'mixtral' not found, try pulling it first", + name: 'ResponseError', + status_code: 404, + }, + provider: 'ollama', + }, + message: + 'Error requesting Ollama service, please troubleshoot or retry based on the following information', + type: 'OllamaBizError', + }, + extra: { fromModel: 'mixtral', fromProvider: 'ollama' }, + }, + { + role: 'user', + content: 'hello', + files: [], + sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589', + topicId: 'v38snJ0A', + createdAt: 1717080410895, + id: 'qOIxEGEB', + updatedAt: 1717080410895, + extra: {}, + }, + { + role: 'assistant', + content: '...', + parentId: 'qOIxEGEB', + sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589', + topicId: 'v38snJ0A', + createdAt: 1717080410970, + id: 'w28FcqY5', + updatedAt: 1717080411485, + error: { + body: { error: { errorType: 'NoOpenAIAPIKey' }, provider: 'openai' }, + message: 'OpenAI API Key is empty, please add a custom OpenAI API Key', + type: 'NoOpenAIAPIKey', + }, + extra: { fromModel: 'gpt-3.5-turbo', fromProvider: 'openai' }, + }, + ], + sessionGroups: [ + { + name: 'Writter', + sort: 0, + createdAt: 1706114744425, + id: 'XlUbvOvL', + updatedAt: 1706114747468, + }, + ], + sessions: [ + { + config: { + model: 'gpt-3.5-turbo', + params: { + frequency_penalty: 0, + presence_penalty: 0, + temperature: 0.6, + top_p: 1, + }, + plugins: [], + systemRole: + "You are a LobeChat technical operator 🍐🐊. You now need to write a developer's guide for LobeChat as a guide for them to develop LobeChat. This guide will include several sections, and you need to output the corresponding document content based on the user's input.\n\nHere is the technical introduction of LobeChat\n\n LobeChat is an AI conversation application built with the Next.js framework. 
It uses a series of technology stacks to implement various functions and features.\n\n\n ## Basic Technology Stack\n\n The core technology stack of LobeChat is as follows:\n\n - **Framework**: We chose [Next.js](https://nextjs.org/), a powerful React framework that provides key features such as server-side rendering, routing framework, and Router Handler for our project.\n - **Component Library**: We use [Ant Design (antd)](https://ant.design/) as the basic component library, and introduce [lobe-ui](https://github.com/lobehub/lobe-ui) as our business component library.\n - **State Management**: We use [zustand](https://github.com/pmndrs/zustand), a lightweight and easy-to-use state management library.\n - **Network Request**: We adopt [swr](https://swr.vercel.app/), a React Hooks library for data fetching.\n - **Routing**: We directly use the routing solution provided by [Next.js](https://nextjs.org/) itself.\n - **Internationalization**: We use [i18next](https://www.i18next.com/) to implement multi-language support for the application.\n - **Styling**: We use [antd-style](https://github.com/ant-design/antd-style), a CSS-in-JS library that is compatible with Ant Design.\n - **Unit Testing**: We use [vitest](https://github.com/vitejs/vitest) for unit testing.\n\n ## Folder Directory Structure\n\n The folder directory structure of LobeChat is as follows:\n\n \\`\\`\\`bash\n src\n ├── app # Main logic and state management related code of the application\n ├── components # Reusable UI components\n ├── config # Application configuration files, including client environment variables and server environment variables\n ├── const # Used to define constants, such as action types, route names, etc.\n ├── features # Function modules related to business functions, such as Agent settings, plugin development pop-ups, etc.\n ├── hooks # Custom utility Hooks reused throughout the application\n ├── layout # Layout components of the application, such as navigation bar, sidebar, etc.\n ├── locales # Language files for internationalization\n ├── services # Encapsulated backend service interfaces, such as HTTP requests\n ├── store # Zustand store for state management\n ├── types # TypeScript type definition files\n └── utils # Common utility functions\n \\`\\`\\`\n", + tts: { + showAllLocaleVoice: false, + sttLocale: 'auto', + ttsService: 'openai', + voice: { openai: 'alloy' }, + }, + chatConfig: { + autoCreateTopicThreshold: 2, + displayMode: 'chat', + enableAutoCreateTopic: true, + historyCount: 1, + }, + }, + group: 'XlUbvOvL', + meta: { + avatar: '📝', + description: + 'LobeChat is an AI conversation application built with the Next.js framework. 
I will help you write the development documentation for LobeChat.', + tags: [ + 'Development Documentation', + 'Technical Introduction', + 'next-js', + 'react', + 'lobe-chat', + ], + title: 'LobeChat Technical Documentation Expert', + }, + type: 'agent', + createdAt: '2024-01-24T16:43:12.164Z', + id: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589', + updatedAt: '2024-01-24T16:46:15.226Z', + pinned: false, + }, + ], + topics: [ + { + title: 'Default Topic', + sessionId: 'inbox', + createdAt: 1714236590531, + id: '2wcF8yaS', + updatedAt: 1714236590531, + }, + { + title: 'Default Topic', + sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589', + createdAt: 1717080410825, + id: 'v38snJ0A', + updatedAt: 1717080410825, + }, + ], + version: mockImportData.version, + }, + { onSuccess: (res) => (result = res) }, + ); + + expect(result!).toEqual({ + sessionGroups: { added: 1, errors: 0, skips: 0 }, + sessions: { added: 1, errors: 0, skips: 0 }, + topics: { added: 2, errors: 0, skips: 0 }, + messages: { added: 4, errors: 0, skips: 0 }, + }); + }); + + it('should import real world data', async () => { + let result: ImportResults; + + await service.importData( + { ...(mockImportData.state as any), version: mockImportData.version }, + { + onSuccess: (res) => { + result = res; + }, + }, + ); + + expect(result!).toEqual({ + sessionGroups: { added: 2, errors: 0, skips: 0 }, + sessions: { added: 15, errors: 0, skips: 0 }, + topics: { added: 4, errors: 0, skips: 0 }, + messages: { added: 32, errors: 0, skips: 0 }, + }); + }); + }); +}); diff --git a/src/services/import/pglite.ts b/src/services/import/pglite.ts new file mode 100644 index 000000000000..0dceca6f8fef --- /dev/null +++ b/src/services/import/pglite.ts @@ -0,0 +1,34 @@ +import { clientDB } from '@/database/client/db'; +import { DataImporterRepos } from '@/database/repositories/dataImporter'; +import { BaseClientService } from '@/services/baseClientService'; +import { useUserStore } from '@/store/user'; +import { ImportStage, ImporterEntryData, OnImportCallbacks } from '@/types/importer'; +import { UserSettings } from '@/types/user/settings'; + +export class ClientService extends BaseClientService { + private get dataImporter(): DataImporterRepos { + return new DataImporterRepos(clientDB as any, this.userId); + } + + importSettings = async (settings: UserSettings) => { + await useUserStore.getState().importAppSettings(settings); + }; + + importData = async (data: ImporterEntryData, callbacks?: OnImportCallbacks) => { + callbacks?.onStageChange?.(ImportStage.Importing); + const time = Date.now(); + try { + const result = await this.dataImporter.importData(data); + const duration = Date.now() - time; + + callbacks?.onStageChange?.(ImportStage.Success); + callbacks?.onSuccess?.(result, duration); + } catch (e) { + console.error(e); + callbacks?.onStageChange?.(ImportStage.Error); + const error = e as Error; + + callbacks?.onError?.({ code: 'ImportError', httpStatus: 0, message: error.message }); + } + }; +} diff --git a/src/services/message/client.ts b/src/services/message/client.ts index d51d6d72a1a4..221905da7805 100644 --- a/src/services/message/client.ts +++ b/src/services/message/client.ts @@ -21,6 +21,7 @@ export class ClientService implements IMessageService { return id; } + // @ts-ignore async batchCreateMessages(messages: ChatMessage[]) { return MessageModel.batchCreate(messages); } @@ -70,6 +71,7 @@ export class ClientService implements IMessageService { return MessageModel.update(id, { error }); } + // @ts-ignore async updateMessage(id: string, message: 
Partial) { return MessageModel.update(id, message); } diff --git a/src/services/message/index.test.ts b/src/services/message/index.test.ts deleted file mode 100644 index 625261f53c90..000000000000 --- a/src/services/message/index.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { Mock, describe, expect, it, vi } from 'vitest'; - -import { CreateMessageParams, MessageModel } from '@/database/_deprecated/models/message'; -import { ChatMessage, ChatMessageError, ChatPluginPayload } from '@/types/message'; - -import { messageService } from './index'; - -// Mock the MessageModel -vi.mock('@/database/_deprecated/models/message', () => { - return { - MessageModel: { - count: vi.fn(), - }, - }; -}); - -describe('MessageService', () => { - beforeEach(() => { - // Reset all mocks before running each test case - vi.resetAllMocks(); - }); - - describe('hasMessages', () => { - it('should return true if there are messages', async () => { - // Setup - (MessageModel.count as Mock).mockResolvedValue(1); - - // Execute - const hasMessages = await messageService.hasMessages(); - - // Assert - expect(MessageModel.count).toHaveBeenCalled(); - expect(hasMessages).toBe(true); - }); - - it('should return false if there are no messages', async () => { - // Setup - (MessageModel.count as Mock).mockResolvedValue(0); - - // Execute - const hasMessages = await messageService.hasMessages(); - - // Assert - expect(MessageModel.count).toHaveBeenCalled(); - expect(hasMessages).toBe(false); - }); - }); -}); diff --git a/src/services/message/index.ts b/src/services/message/index.ts index 930eaf6034ed..2791b5ab692e 100644 --- a/src/services/message/index.ts +++ b/src/services/message/index.ts @@ -1,5 +1,9 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const messageService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? 
new ServerService() : clientService; diff --git a/src/services/message/pglite.test.ts b/src/services/message/pglite.test.ts new file mode 100644 index 000000000000..23629732859c --- /dev/null +++ b/src/services/message/pglite.test.ts @@ -0,0 +1,430 @@ +import dayjs from 'dayjs'; +import { and, eq } from 'drizzle-orm'; +import { Mock, afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +import { MessageModel } from '@/database/_deprecated/models/message'; +import { clientDB, initializeDB } from '@/database/client/db'; +import { + MessageItem, + files, + messagePlugins, + messageTTS, + messageTranslates, + messages, + sessions, + topics, + users, +} from '@/database/schemas'; +import { + ChatMessage, + ChatMessageError, + ChatTTS, + ChatTranslate, + CreateMessageParams, +} from '@/types/message'; + +import { ClientService } from './pglite'; + +const userId = 'message-db'; +const sessionId = '1'; +const topicId = 'topic-id'; + +// Mock data +const mockMessageId = 'mock-message-id'; +const mockMessage = { + id: mockMessageId, + content: 'Mock message content', + sessionId, + role: 'user', +} as ChatMessage; + +const mockMessages = [mockMessage]; + +beforeEach(async () => { + await initializeDB(); + + // Clear the tables before each test case + await clientDB.transaction(async (trx) => { + await trx.delete(users); + await trx.insert(users).values([{ id: userId }, { id: '456' }]); + + await trx.insert(sessions).values([{ id: sessionId, userId }]); + await trx.insert(topics).values([{ id: topicId, sessionId, userId }]); + await trx.insert(files).values({ + id: 'f1', + userId: userId, + url: 'abc', + name: 'file-1', + fileType: 'image/png', + size: 1000, + }); + }); +}); + +afterEach(async () => { + // Clear the tables after each test case + await clientDB.delete(users); +}); + +const messageService = new ClientService(userId); + +describe('MessageClientService', () => { + describe('create', () => { + it('should create a message and return its id', async () => { + // Setup + const createParams: CreateMessageParams = { + content: 'New message content', + sessionId, + role: 'user', + }; + + // Execute + const messageId = await messageService.createMessage(createParams); + + // Assert + expect(messageId).toMatch(/^msg_/); + }); + }); + + describe('batchCreate', () => { + it('should batch create messages', async () => { + // Execute + await messageService.batchCreateMessages([ + { + content: 'Mock message content', + sessionId, + role: 'user', + }, + { + content: 'Mock message content', + sessionId, + role: 'user', + }, + ] as MessageItem[]); + const count = await clientDB.$count(messages); + + // Assert + expect(count).toBe(2); + }); + }); + + describe('removeMessage', () => { + it('should remove a message by id', async () => { + // Execute + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + await messageService.removeMessage(mockMessageId); + + // Assert + const count = await clientDB.$count(messages); + + expect(count).toBe(0); + }); + }); + describe('removeMessages', () => { + it('should remove messages by ids', async () => { + // Setup + await clientDB.insert(messages).values([ + { id: mockMessageId, role: 'user', userId }, + { role: 'assistant', userId }, + ]); + + // Execute + await messageService.removeMessages([mockMessageId]); + + // Assert + const count = await clientDB.$count(messages); + + expect(count).toBe(1); + }); + }); + + describe('getMessages', () => { + it('should retrieve messages by sessionId and topicId', async () => { + // Setup + await clientDB + .insert(messages) + 
.values({ id: mockMessageId, sessionId, topicId, role: 'user', userId }); + + // Execute + const data = await messageService.getMessages(sessionId, topicId); + + // Assert + expect(data[0]).toMatchObject({ id: mockMessageId, role: 'user' }); + }); + }); + + describe('getAllMessagesInSession', () => { + it('should retrieve all messages in a session', async () => { + // Setup + const sessionId = 'session-id'; + await clientDB.insert(sessions).values([ + { id: 'bbb', userId }, + { id: sessionId, userId }, + ]); + await clientDB.insert(messages).values([ + { sessionId, topicId, role: 'user', userId }, + { sessionId, topicId, role: 'assistant', userId }, + { sessionId: 'bbb', topicId, role: 'assistant', userId }, + ]); + + // Execute + const data = await messageService.getAllMessagesInSession(sessionId); + + // Assert + expect(data.length).toBe(2); + }); + }); + + describe('removeMessagesByAssistant', () => { + it('should batch remove messages by assistantId and topicId', async () => { + // Setup + const assistantId = 'assistant-id'; + const sessionId = 'session-id'; + await clientDB.insert(sessions).values([ + { id: 'bbb', userId }, + { id: sessionId, userId }, + ]); + await clientDB.insert(messages).values([ + { sessionId, topicId, role: 'user', userId }, + { sessionId, topicId, role: 'assistant', userId }, + { sessionId: 'bbb', topicId, role: 'assistant', userId }, + ]); + + // Execute + await messageService.removeMessagesByAssistant(sessionId, topicId); + + // Assert + const result = await clientDB.query.messages.findMany({ + where: and(eq(messages.sessionId, sessionId), eq(messages.topicId, topicId)), + }); + + expect(result.length).toBe(0); + }); + }); + + describe('clearAllMessage', () => { + it('should clear all messages from the table', async () => { + // Setup + await clientDB.insert(users).values({ id: 'another' }); + await clientDB.insert(messages).values([ + { id: mockMessageId, role: 'user', userId }, + { role: 'user', userId: 'another' }, + ]); + + // Execute + await messageService.removeAllMessages(); + + // Assert + const result = await clientDB.query.messages.findMany({ + where: eq(messages.userId, userId), + }); + expect(result.length).toBe(0); + }); + }); + + describe('getAllMessages', () => { + it('should retrieve all messages', async () => { + await clientDB.insert(messages).values([ + { sessionId, topicId, content: '1', role: 'user', userId }, + { sessionId, topicId, content: '2', role: 'assistant', userId }, + ]); + + // Execute + const data = await messageService.getAllMessages(); + + // Assert + expect(data).toMatchObject([ + { sessionId, topicId, content: '1', role: 'user', userId }, + { sessionId, topicId, content: '2', role: 'assistant', userId }, + ]); + }); + }); + + describe('updateMessageError', () => { + it('should update the error field of a message', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + const newError = { + type: 'InvalidProviderAPIKey', + message: 'Error occurred', + } as ChatMessageError; + + // Execute + await messageService.updateMessageError(mockMessageId, newError); + + // Assert + const result = await clientDB.query.messages.findFirst({ + where: eq(messages.id, mockMessageId), + }); + + expect(result!.error).toEqual(newError); + }); + }); + + // describe('updateMessagePlugin', () => { + // it('should update the plugin payload of a message', async () => { + // // Setup + // const newPlugin = { + // type: 'default', + // apiName: 'abc', + // arguments: '', + // identifier: 
'plugin1', + // } as ChatPluginPayload; + // + // (MessageModel.update as Mock).mockResolvedValue({ ...mockMessage, plugin: newPlugin }); + // + // // Execute + // const result = await messageService.updateMessagePlugin(mockMessageId, newPlugin); + // + // // Assert + // expect(MessageModel.update).toHaveBeenCalledWith(mockMessageId, { plugin: newPlugin }); + // expect(result).toEqual({ ...mockMessage, plugin: newPlugin }); + // }); + // }); + + describe('updateMessagePluginState', () => { + it('should update the plugin state of a message', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + await clientDB.insert(messagePlugins).values({ id: mockMessageId }); + const key = 'stateKey'; + const value = 'stateValue'; + const newPluginState = { [key]: value }; + + // Execute + await messageService.updateMessagePluginState(mockMessageId, { stateKey: value }); + + // Assert + const result = await clientDB.query.messagePlugins.findFirst({ + where: eq(messagePlugins.id, mockMessageId), + }); + expect(result!.state).toEqual(newPluginState); + }); + }); + + describe('updateMessagePluginArguments', () => { + it('should update the plugin arguments object of a message', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + await clientDB.insert(messagePlugins).values({ id: mockMessageId }); + const value = 'stateValue'; + + // Execute + await messageService.updateMessagePluginArguments(mockMessageId, { key: value }); + + // Assert + const result = await clientDB.query.messagePlugins.findFirst({ + where: eq(messagePlugins.id, mockMessageId), + }); + expect(result).toMatchObject({ arguments: '{"key":"stateValue"}' }); + }); + it('should update the plugin arguments string of a message', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + await clientDB.insert(messagePlugins).values({ id: mockMessageId }); + const value = 'stateValue'; + // Execute + await messageService.updateMessagePluginArguments( + mockMessageId, + JSON.stringify({ abc: value }), + ); + + // Assert + const result = await clientDB.query.messagePlugins.findFirst({ + where: eq(messagePlugins.id, mockMessageId), + }); + expect(result).toMatchObject({ arguments: '{"abc":"stateValue"}' }); + }); + }); + + describe('countMessages', () => { + it('should count the total number of messages', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + + // Execute + const count = await messageService.countMessages(); + + // Assert + expect(count).toBe(1); + }); + }); + + describe('countTodayMessages', () => { + it('should count the number of messages created today', async () => { + // Setup + const mockMessages = [ + { ...mockMessage, id: undefined, createdAt: new Date(), userId }, + { ...mockMessage, id: undefined, createdAt: new Date(), userId }, + { ...mockMessage, id: undefined, createdAt: new Date('2023-01-01'), userId }, + ]; + await clientDB.insert(messages).values(mockMessages); + + // Execute + const count = await messageService.countTodayMessages(); + + // Assert + expect(count).toBe(2); + }); + }); + + describe('updateMessageTTS', () => { + it('should update the TTS field of a message', async () => { + // Setup + await clientDB + .insert(files) + .values({ id: 'file-abc', fileType: 'text', name: 'abc', url: 'abc', size: 100, userId }); + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', 
userId }); + const newTTS: ChatTTS = { contentMd5: 'abc', file: 'file-abc' }; + + // Execute + await messageService.updateMessageTTS(mockMessageId, newTTS); + + // Assert + const result = await clientDB.query.messageTTS.findFirst({ + where: eq(messageTTS.id, mockMessageId), + }); + + expect(result).toMatchObject({ contentMd5: 'abc', fileId: 'file-abc', id: mockMessageId }); + }); + }); + + describe('updateMessageTranslate', () => { + it('should update the translate field of a message', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + const newTranslate: ChatTranslate = { content: 'Translated text', to: 'es' }; + + // Execute + await messageService.updateMessageTranslate(mockMessageId, newTranslate); + + // Assert + const result = await clientDB.query.messageTranslates.findFirst({ + where: eq(messageTranslates.id, mockMessageId), + }); + + expect(result).toMatchObject(newTranslate); + }); + }); + + describe('hasMessages', () => { + it('should return true if there are messages', async () => { + // Setup + await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId }); + + // Execute + const result = await messageService.hasMessages(); + + // Assert + expect(result).toBe(true); + }); + + it('should return false if there are no messages', async () => { + // Execute + const result = await messageService.hasMessages(); + + // Assert + expect(result).toBe(false); + }); + }); +}); diff --git a/src/services/message/pglite.ts b/src/services/message/pglite.ts new file mode 100644 index 000000000000..f6ad84d53925 --- /dev/null +++ b/src/services/message/pglite.ts @@ -0,0 +1,118 @@ +import dayjs from 'dayjs'; + +import { INBOX_SESSION_ID } from '@/const/session'; +import { clientDB } from '@/database/client/db'; +import { MessageItem } from '@/database/schemas'; +import { MessageModel } from '@/database/server/models/message'; +import { BaseClientService } from '@/services/baseClientService'; +import { + ChatMessage, + ChatMessageError, + ChatTTS, + ChatTranslate, + CreateMessageParams, +} from '@/types/message'; + +import { IMessageService } from './type'; + +export class ClientService extends BaseClientService implements IMessageService { + private get messageModel(): MessageModel { + return new MessageModel(clientDB as any, this.userId); + } + + async createMessage({ sessionId, ...params }: CreateMessageParams) { + const { id } = await this.messageModel.create({ + ...params, + sessionId: this.toDbSessionId(sessionId) as string, + }); + + return id; + } + + async batchCreateMessages(messages: MessageItem[]) { + return this.messageModel.batchCreate(messages); + } + + async getMessages(sessionId: string, topicId?: string) { + const data = await this.messageModel.query({ + sessionId: this.toDbSessionId(sessionId), + topicId, + }); + + return data as unknown as ChatMessage[]; + } + + async getAllMessages() { + const data = await this.messageModel.queryAll(); + + return data as unknown as ChatMessage[]; + } + + async countMessages() { + return this.messageModel.count(); + } + + async countTodayMessages() { + const topics = await this.messageModel.queryAll(); + return topics.filter( + (item) => dayjs(item.createdAt).format('YYYY-MM-DD') === dayjs().format('YYYY-MM-DD'), + ).length; + } + + async getAllMessagesInSession(sessionId: string) { + const data = this.messageModel.queryBySessionId(this.toDbSessionId(sessionId)); + + return data as unknown as ChatMessage[]; + } + + async updateMessageError(id: string, error: 
ChatMessageError) { + return this.messageModel.update(id, { error }); + } + + async updateMessage(id: string, message: Partial) { + return this.messageModel.update(id, message); + } + + async updateMessageTTS(id: string, tts: Partial | false) { + return this.messageModel.updateTTS(id, tts as any); + } + + async updateMessageTranslate(id: string, translate: Partial | false) { + return this.messageModel.updateTranslate(id, translate as any); + } + + async updateMessagePluginState(id: string, value: Record) { + return this.messageModel.updatePluginState(id, value); + } + + async updateMessagePluginArguments(id: string, value: string | Record) { + const args = typeof value === 'string' ? value : JSON.stringify(value); + + return this.messageModel.updateMessagePlugin(id, { arguments: args }); + } + + async removeMessage(id: string) { + return this.messageModel.deleteMessage(id); + } + + async removeMessages(ids: string[]) { + return this.messageModel.deleteMessages(ids); + } + + async removeMessagesByAssistant(sessionId: string, topicId?: string) { + return this.messageModel.deleteMessagesBySession(this.toDbSessionId(sessionId), topicId); + } + + async removeAllMessages() { + return this.messageModel.deleteAllMessages(); + } + + async hasMessages() { + const number = await this.countMessages(); + return number > 0; + } + + private toDbSessionId(sessionId: string | undefined) { + return sessionId === INBOX_SESSION_ID ? undefined : sessionId; + } +} diff --git a/src/services/message/server.ts b/src/services/message/server.ts index 6562b4ec3e25..3e1d9556d7cd 100644 --- a/src/services/message/server.ts +++ b/src/services/message/server.ts @@ -1,5 +1,6 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ import { INBOX_SESSION_ID } from '@/const/session'; +import { MessageItem } from '@/database/schemas'; import { lambdaClient } from '@/libs/trpc/client'; import { ChatMessage, @@ -19,20 +20,23 @@ export class ServerService implements IMessageService { }); } - batchCreateMessages(messages: ChatMessage[]): Promise { + batchCreateMessages(messages: MessageItem[]): Promise { return lambdaClient.message.batchCreateMessages.mutate(messages); } - getMessages(sessionId?: string, topicId?: string | undefined): Promise { - return lambdaClient.message.getMessages.query({ + getMessages = async (sessionId?: string, topicId?: string | undefined) => { + const data = await lambdaClient.message.getMessages.query({ sessionId: this.toDbSessionId(sessionId), topicId, }); - } + + return data as unknown as ChatMessage[]; + }; getAllMessages(): Promise { return lambdaClient.message.getAllMessages.query(); } + getAllMessagesInSession(sessionId: string): Promise { return lambdaClient.message.getAllMessagesInSession.query({ sessionId: this.toDbSessionId(sessionId), @@ -63,7 +67,7 @@ export class ServerService implements IMessageService { return lambdaClient.message.updateMessagePlugin.mutate({ id, value: { arguments: args } }); } - updateMessage(id: string, message: Partial): Promise { + updateMessage(id: string, message: Partial): Promise { return lambdaClient.message.update.mutate({ id, value: message }); } @@ -79,10 +83,6 @@ export class ServerService implements IMessageService { return lambdaClient.message.updatePluginState.mutate({ id, value }); } - bindMessagesToTopic(_topicId: string, _messageIds: string[]): Promise { - throw new Error('Method not implemented.'); - } - removeMessage(id: string): Promise { return lambdaClient.message.removeMessage.mutate({ id }); } diff --git a/src/services/message/type.ts 
b/src/services/message/type.ts index 2929620ad128..4ac08fd54c17 100644 --- a/src/services/message/type.ts +++ b/src/services/message/type.ts @@ -1,4 +1,4 @@ -import { DB_Message } from '@/database/_deprecated/schemas/message'; +import { MessageItem } from '@/database/schemas'; import { ChatMessage, ChatMessageError, @@ -11,7 +11,7 @@ import { export interface IMessageService { createMessage(data: CreateMessageParams): Promise; - batchCreateMessages(messages: ChatMessage[]): Promise; + batchCreateMessages(messages: MessageItem[]): Promise; getMessages(sessionId: string, topicId?: string): Promise; getAllMessages(): Promise; @@ -20,11 +20,10 @@ export interface IMessageService { countTodayMessages(): Promise; updateMessageError(id: string, error: ChatMessageError): Promise; - updateMessage(id: string, message: Partial): Promise; + updateMessage(id: string, message: Partial): Promise; updateMessageTTS(id: string, tts: Partial | false): Promise; updateMessageTranslate(id: string, translate: Partial | false): Promise; updateMessagePluginState(id: string, value: Record): Promise; - bindMessagesToTopic(topicId: string, messageIds: string[]): Promise; removeMessage(id: string): Promise; removeMessages(ids: string[]): Promise; diff --git a/src/services/plugin/index.ts b/src/services/plugin/index.ts index 77b3ab38869b..e06694579939 100644 --- a/src/services/plugin/index.ts +++ b/src/services/plugin/index.ts @@ -1,5 +1,9 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const pluginService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? 
new ServerService() : clientService; diff --git a/src/services/plugin/pglite.test.ts b/src/services/plugin/pglite.test.ts new file mode 100644 index 000000000000..018c4eaab4ef --- /dev/null +++ b/src/services/plugin/pglite.test.ts @@ -0,0 +1,175 @@ +import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk'; +import { eq } from 'drizzle-orm'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +import { clientDB, initializeDB } from '@/database/client/db'; +import { installedPlugins, users } from '@/database/schemas'; +import { LobeTool } from '@/types/tool'; +import { LobeToolCustomPlugin } from '@/types/tool/plugin'; + +import { ClientService } from './pglite'; +import { InstallPluginParams } from './type'; + +// Mocking modules and functions + +const userId = 'message-db'; +const pluginService = new ClientService(userId); + +// Mock data +beforeEach(async () => { + await initializeDB(); + + // Reset the table data before each test case + await clientDB.transaction(async (trx) => { + await trx.delete(users); + await trx.insert(users).values([{ id: userId }, { id: '456' }]); + }); +}); + +describe('PluginService', () => { + describe('installPlugin', () => { + it('should install a plugin', async () => { + // Arrange + const fakePlugin = { + identifier: 'test-plugin-d', + manifest: { name: 'TestPlugin', version: '1.0.0' } as unknown as LobeChatPluginManifest, + type: 'plugin', + } as InstallPluginParams; + + // Act + await pluginService.installPlugin(fakePlugin); + + // Assert + const result = await clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, fakePlugin.identifier), + }); + expect(result).toMatchObject(fakePlugin); + }); + }); + + describe('getInstalledPlugins', () => { + it('should return a list of installed plugins', async () => { + // Arrange + const fakePlugins = [{ identifier: 'test-plugin', type: 'plugin' }] as LobeTool[]; + await clientDB + .insert(installedPlugins) + .values([{ identifier: 'test-plugin', type: 'plugin', userId }]); + // Act + const data = await pluginService.getInstalledPlugins(); + + // Assert + expect(data).toMatchObject(fakePlugins); + }); + }); + + describe('uninstallPlugin', () => { + it('should uninstall a plugin', async () => { + // Arrange + const identifier = 'test-plugin'; + await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]); + + // Act + await pluginService.uninstallPlugin(identifier); + + // Assert + const result = await clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, identifier), + }); + expect(result).toBe(undefined); + }); + }); + + describe('createCustomPlugin', () => { + it('should create a custom plugin', async () => { + // Arrange + const customPlugin = { + identifier: 'custom-plugin-x', + manifest: {}, + type: 'customPlugin', + } as LobeToolCustomPlugin; + + // Act + await pluginService.createCustomPlugin(customPlugin); + + // Assert + const result = await clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, customPlugin.identifier), + }); + expect(result).toMatchObject(customPlugin); + }); + }); + + describe('updatePlugin', () => { + it('should update a plugin', async () => { + // Arrange + const identifier = 'plugin-id'; + const value = { customParams: { ab: '1' } } as unknown as LobeToolCustomPlugin; + await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]); + + // Act + await pluginService.updatePlugin(identifier, value); + + // Assert + const result = await 
clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, identifier), + }); + expect(result).toMatchObject(value); + }); + }); + + describe('updatePluginManifest', () => { + it('should update a plugin manifest', async () => { + // Arrange + const identifier = 'plugin-id'; + const manifest = { name: 'NewPluginManifest' } as unknown as LobeChatPluginManifest; + await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]); + + // Act + await pluginService.updatePluginManifest(identifier, manifest); + + // Assert + const result = await clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, identifier), + }); + expect(result).toMatchObject({ manifest }); + }); + }); + + describe('removeAllPlugins', () => { + it('should remove all plugins', async () => { + // Arrange + await clientDB.insert(installedPlugins).values([ + { identifier: '123', type: 'plugin', userId }, + { identifier: '234', type: 'plugin', userId }, + ]); + + // Act + await pluginService.removeAllPlugins(); + + // Assert + const result = await clientDB.query.installedPlugins.findMany({ + where: eq(installedPlugins.userId, userId), + }); + expect(result.length).toEqual(0); + }); + }); + + describe('updatePluginSettings', () => { + it('should update plugin settings', async () => { + // Arrange + const id = 'plugin-id'; + const settings = { color: 'blue' }; + await clientDB.insert(installedPlugins).values([{ identifier: id, type: 'plugin', userId }]); + + // Act + await pluginService.updatePluginSettings(id, settings); + + // Assert + const result = await clientDB.query.installedPlugins.findFirst({ + where: eq(installedPlugins.identifier, id), + }); + + expect(result).toMatchObject({ settings }); + }); + }); +}); diff --git a/src/services/plugin/pglite.ts b/src/services/plugin/pglite.ts new file mode 100644 index 000000000000..b8cb130566aa --- /dev/null +++ b/src/services/plugin/pglite.ts @@ -0,0 +1,51 @@ +import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk'; + +import { clientDB } from '@/database/client/db'; +import { PluginModel } from '@/database/server/models/plugin'; +import { BaseClientService } from '@/services/baseClientService'; +import { LobeTool } from '@/types/tool'; +import { LobeToolCustomPlugin } from '@/types/tool/plugin'; + +import { IPluginService, InstallPluginParams } from './type'; + +export class ClientService extends BaseClientService implements IPluginService { + private get pluginModel(): PluginModel { + return new PluginModel(clientDB as any, this.userId); + } + + installPlugin = async (plugin: InstallPluginParams) => { + await this.pluginModel.create(plugin); + return; + }; + + getInstalledPlugins = () => { + return this.pluginModel.query() as Promise; + }; + + async uninstallPlugin(identifier: string) { + await this.pluginModel.delete(identifier); + return; + } + + async createCustomPlugin(customPlugin: LobeToolCustomPlugin) { + await this.pluginModel.create({ ...customPlugin, type: 'customPlugin' }); + return; + } + + async updatePlugin(id: string, value: LobeToolCustomPlugin) { + await this.pluginModel.update(id, value); + return; + } + async updatePluginManifest(id: string, manifest: LobeChatPluginManifest) { + await this.pluginModel.update(id, { manifest }); + } + + async removeAllPlugins() { + await this.pluginModel.deleteAll(); + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async updatePluginSettings(id: string, settings: any, _?: AbortSignal) { + await 
this.pluginModel.update(id, { settings }); + } +} diff --git a/src/services/session/index.ts b/src/services/session/index.ts index 0d55e3cc45e5..b4397dd700a8 100644 --- a/src/services/session/index.ts +++ b/src/services/session/index.ts @@ -1,5 +1,9 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const sessionService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : clientService; diff --git a/src/services/session/pglite.test.ts b/src/services/session/pglite.test.ts new file mode 100644 index 000000000000..832259a13d8a --- /dev/null +++ b/src/services/session/pglite.test.ts @@ -0,0 +1,411 @@ +import { eq, not } from 'drizzle-orm/expressions'; +import { Mock, beforeEach, describe, expect, it, vi } from 'vitest'; + +import { INBOX_SESSION_ID } from '@/const/session'; +import { clientDB, initializeDB } from '@/database/client/db'; +import { + NewSession, + SessionItem, + agents, + agentsToSessions, + sessionGroups, + sessions, + users, +} from '@/database/schemas'; +import { LobeAgentChatConfig, LobeAgentConfig } from '@/types/agent'; +import { LobeAgentSession, LobeSessionType, SessionGroups } from '@/types/session'; + +import { ClientService } from './pglite'; + +const userId = 'message-db'; +const sessionService = new ClientService(userId); + +const mockSessionId = 'mock-session-id'; +const mockAgentId = 'agent-id'; + +// Mock data +beforeEach(async () => { + await initializeDB(); + + // Clear the tables before each test case + await clientDB.transaction(async (trx) => { + await trx.insert(users).values([{ id: userId }, { id: '456' }]); + await trx.insert(sessions).values([{ id: mockSessionId, userId }]); + await trx.insert(agents).values([{ id: mockAgentId, userId }]); + await trx.insert(agentsToSessions).values([{ agentId: mockAgentId, sessionId: mockSessionId }]); + await trx.insert(sessionGroups).values([ + { id: 'group-1', name: 'group-A', sort: 2, userId }, + { id: 'group-2', name: 'group-B', sort: 1, userId }, + { id: 'group-4', name: 'group-C', sort: 1, userId: '456' }, + ]); + }); +}); + +afterEach(async () => { + // Clear the tables after each test case + await clientDB.delete(users); +}); + +describe('SessionService', () => { + const mockSession = { + id: mockSessionId, + type: 'agent', + meta: { title: 'Mock Session' }, + } as LobeAgentSession; + + describe('createSession', () => { + it('should create a new session and return its id', async () => { + // Setup + const sessionType = LobeSessionType.Agent; + const defaultValue = { meta: { title: 'New Session' } } as Partial<LobeAgentSession>; + + // Execute + const sessionId = await sessionService.createSession(sessionType, defaultValue); + + // Assert + expect(sessionId).toMatch(/^ssn_/); + }); + }); + + describe('removeSession', () => { + it('should remove a session by its id', async () => { + // Execute + await sessionService.removeSession(mockSessionId); + + // Assert + + const result = await clientDB.query.sessions.findFirst({ + where: eq(sessions.id, mockSessionId), + }); + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe('removeAllSessions', () => { + it('should clear all sessions from the table', async () => { + // Setup + await clientDB + .insert(sessions) + .values([{ 
userId: userId }, { userId: userId }, { userId: userId }]); + + // Execute + await sessionService.removeAllSessions(); + + // Assert + const result = await clientDB.query.sessions.findMany({ + where: eq(sessions.userId, userId), + }); + + expect(result.length).toBe(0); + }); + }); + + describe('updateSession', () => { + it('should update the group of a session', async () => { + // Setup + const groupId = 'group-1'; + + // Execute + await sessionService.updateSession(mockSessionId, { group: groupId }); + + // Assert + const result = await clientDB.query.sessions.findFirst({ + where: eq(sessions.id, mockSessionId), + }); + expect(result).toMatchObject({ groupId }); + }); + + it('should update the pinned status of a session', async () => { + // Setup + const pinned = true; + + // Execute + await sessionService.updateSession(mockSessionId, { pinned }); + + // Assert + const result = await clientDB.query.sessions.findFirst({ + where: eq(sessions.id, mockSessionId), + }); + + expect(result!.pinned).toBeTruthy(); + }); + }); + + describe('updateSessionConfig', () => { + it('should update the config of a session', async () => { + // Setup + const newConfig = { model: 'abc' } as LobeAgentConfig; + + // Execute + await sessionService.updateSessionConfig(mockSessionId, newConfig); + + // Assert + const result = await sessionService.getSessionConfig(mockSessionId); + expect(result).toMatchObject(newConfig); + }); + }); + + describe('countSessions', () => { + it('should return 0 if no sessions exist', async () => { + await clientDB.delete(sessions); + + // Execute + const result = await sessionService.countSessions(); + + // Assert + expect(result).toBe(0); + }); + + it('should return the number of sessions if sessions exist', async () => { + // Setup + await clientDB.delete(sessions); + await clientDB.insert(sessions).values([{ userId }]); + + // Execute + const result = await sessionService.countSessions(); + + // Assert + expect(result).toBe(1); + }); + }); + + describe('searchSessions', () => { + it('should return sessions that match the keyword', async () => { + // Setup + await clientDB.insert(agents).values({ userId, id: 'agent-1', title: 'Session Name' }); + await clientDB + .insert(agentsToSessions) + .values({ agentId: 'agent-1', sessionId: mockSessionId }); + + // Execute + const keyword = 'Name'; + const result = await sessionService.searchSessions(keyword); + + // Assert + // TODO: the search should also cover title and description later; the current client-side search has issues here + expect(result).toMatchObject([{ id: mockSessionId }]); + }); + }); + + describe('cloneSession', () => { + it('should duplicate a session and return its id', async () => { + // Setup + const newTitle = 'Duplicated Session'; + const session: NewSession = { + id: 'duplicated-session-id', + title: '123', + userId, + }; + await clientDB.insert(sessions).values([session]); + await clientDB.insert(agents).values({ userId, id: 'agent-1' }); + await clientDB + .insert(agentsToSessions) + .values({ agentId: 'agent-1', sessionId: 'duplicated-session-id' }); + + // Execute + const duplicatedSessionId = await sessionService.cloneSession(mockSessionId, newTitle); + + // Assert + + const result = await clientDB.query.sessions.findFirst({ + where: eq(sessions.id, duplicatedSessionId!), + }); + expect(result).toMatchObject({ title: 'Duplicated Session' }); + }); + }); + + describe('getGroupedSessions', () => { + it('should retrieve sessions with their group', async () => { + // Execute + const sessionsWithGroup = await sessionService.getGroupedSessions(); + 
expect(sessionsWithGroup).toMatchObject({ + sessionGroups: [ + { id: 'group-2', name: 'group-B', sort: 1 }, + { id: 'group-1', name: 'group-A', sort: 2 }, + ], + sessions: [{ id: 'mock-session-id', type: 'agent' }], + }); + }); + }); + + describe('getSessionsByType', () => { + it('should get sessions by type "all"', async () => { + const sessions = await sessionService.getSessionsByType('all'); + expect(sessions).toBeDefined(); + }); + + it('should get sessions by type "agent"', async () => { + const sessions = await sessionService.getSessionsByType('agent'); + expect(sessions).toBeDefined(); + }); + + it('should get sessions by type "group"', async () => { + const sessions = await sessionService.getSessionsByType('group'); + expect(sessions).toBeDefined(); + }); + }); + + describe('getSessionConfig', () => { + it.skip('should get default config for INBOX_SESSION_ID', async () => { + const config = await sessionService.getSessionConfig(INBOX_SESSION_ID); + expect(config).toBeDefined(); + }); + + it('should throw error for non-existent session', async () => { + await expect(sessionService.getSessionConfig('non-existent')).rejects.toThrow( + 'Session not found', + ); + }); + }); + + describe('updateSessionMeta', () => { + it('should not update meta for INBOX_SESSION_ID', async () => { + const result = await sessionService.updateSessionMeta(INBOX_SESSION_ID, { + title: 'New Title', + }); + expect(result).toBeUndefined(); + }); + + it('should update meta for normal session', async () => { + const meta = { title: 'Updated Title' }; + await sessionService.updateSessionMeta(mockSessionId, meta); + + const session = await clientDB.query.sessions.findFirst({ + where: eq(sessions.id, mockSessionId), + }); + expect(session).toBeDefined(); + }); + }); + + describe('updateSessionChatConfig', () => { + it('should update chat config', async () => { + const chatConfig = { temperature: 0.8 } as Partial; + const result = await sessionService.updateSessionChatConfig(mockSessionId, chatConfig); + expect(result).toBeDefined(); + }); + }); + + describe('model getters', () => { + it('should return session model instance', () => { + // @ts-ignore - accessing private getter + const model = sessionService.sessionModel; + expect(model).toBeDefined(); + }); + + it('should return session group model instance', () => { + // @ts-ignore - accessing private getter + const model = sessionService.sessionGroupModel; + expect(model).toBeDefined(); + }); + }); + + // SessionGroup related tests + describe('createSessionGroup', () => { + it('should create a new session group and return its id', async () => { + // Setup + const groupName = 'New Group'; + const sort = 1; + + // Execute + const groupId = await sessionService.createSessionGroup(groupName, sort); + + // Assert + expect(groupId).toMatch(/^sg_/); + + const result = await clientDB.query.sessionGroups.findFirst({ + where: eq(sessionGroups.id, groupId), + }); + + expect(result).toMatchObject({ id: groupId, name: groupName, sort }); + }); + }); + + describe('removeSessionGroup', () => { + it('should remove a session group by its id', async () => { + const groupId = 'group-1'; + // Execute + await sessionService.removeSessionGroup(groupId); + + const result = await clientDB.query.sessionGroups.findFirst({ + where: eq(sessionGroups.id, groupId), + }); + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe('clearSessionGroups', () => { + it('should clear all session groups', async () => { + // Execute + await sessionService.removeSessionGroups(); + + // Assert 
+ const result = await clientDB.query.sessionGroups.findMany({ + where: eq(sessionGroups.userId, userId), + }); + + expect(result.length).toBe(0); + + const result2 = await clientDB.query.sessionGroups.findMany({ + where: not(eq(sessionGroups.userId, userId)), + }); + + expect(result2.length).toBeGreaterThan(0); + }); + }); + + describe('getSessionGroups', () => { + it('should retrieve all session groups', async () => { + // Execute + const result = await sessionService.getSessionGroups(); + + // Assert + const groups = [ + { id: 'group-2', name: 'group-B', sort: 1 }, + { id: 'group-1', name: 'group-A', sort: 2 }, + ]; + expect(result).toMatchObject(groups); + }); + }); + + describe('updateSessionGroup', () => { + it('should update a session group', async () => { + // Setup + const groupId = 'group-1'; + const data = { name: 'Updated Group', sort: 2 }; + + // Execute + await sessionService.updateSessionGroup(groupId, data); + + // Assert + const result = await clientDB.query.sessionGroups.findFirst({ + where: eq(sessionGroups.id, groupId), + }); + expect(result).toMatchObject({ id: groupId, ...data }); + }); + }); + + describe('updateSessionGroupOrder', () => { + it('should update the order of session groups', async () => { + // Setup + const sortMap = [ + { id: 'group-1', sort: 2 }, + { id: 'group-2', sort: 1 }, + ]; + + // Execute + await sessionService.updateSessionGroupOrder(sortMap); + + // Assert + const data = await clientDB.query.sessionGroups.findMany({ + where: eq(sessionGroups.userId, userId), + }); + expect(data).toMatchObject([ + { id: 'group-1', sort: 2 }, + { id: 'group-2', sort: 1 }, + ]); + }); + }); +}); diff --git a/src/services/session/pglite.ts b/src/services/session/pglite.ts new file mode 100644 index 000000000000..adfbb9989214 --- /dev/null +++ b/src/services/session/pglite.ts @@ -0,0 +1,184 @@ +import { DeepPartial } from 'utility-types'; + +import { INBOX_SESSION_ID } from '@/const/session'; +import { clientDB } from '@/database/client/db'; +import { AgentItem } from '@/database/schemas'; +import { SessionModel } from '@/database/server/models/session'; +import { SessionGroupModel } from '@/database/server/models/sessionGroup'; +import { BaseClientService } from '@/services/baseClientService'; +import { LobeAgentChatConfig, LobeAgentConfig } from '@/types/agent'; +import { MetaData } from '@/types/meta'; +import { + ChatSessionList, + LobeAgentSession, + LobeSessionType, + LobeSessions, + SessionGroupItem, + SessionGroups, + UpdateSessionParams, +} from '@/types/session'; + +import { ISessionService } from './type'; + +export class ClientService extends BaseClientService implements ISessionService { + private get sessionModel(): SessionModel { + return new SessionModel(clientDB as any, this.userId); + } + + private get sessionGroupModel(): SessionGroupModel { + return new SessionGroupModel(clientDB as any, this.userId); + } + + async createSession(type: LobeSessionType, data: Partial): Promise { + const { config, group, meta, ...session } = data; + + const item = await this.sessionModel.create({ + config: { ...config, ...meta } as any, + session: { ...session, groupId: group }, + type, + }); + if (!item) { + throw new Error('session create Error'); + } + return item.id; + } + + async batchCreateSessions(importSessions: LobeSessions) { + // @ts-ignore + return this.sessionModel.batchCreate(importSessions); + } + + async cloneSession(id: string, newTitle: string): Promise { + const res = await this.sessionModel.duplicate(id, newTitle); + + if (res) return res?.id; + 
}
+
+  async getGroupedSessions(): Promise<ChatSessionList> {
+    return this.sessionModel.queryWithGroups();
+  }
+
+  async getSessionConfig(id: string): Promise<LobeAgentConfig> {
+    const res = await this.sessionModel.findByIdOrSlug(id);
+
+    if (!res) throw new Error('Session not found');
+
+    return res.agent as LobeAgentConfig;
+  }
+
+  /**
+   * This method should be removed accordingly
+   */
+  async getSessionsByType(type: 'agent' | 'group' | 'all' = 'all'): Promise<LobeSessions> {
+    switch (type) {
+      // TODO: add a filter to get only agents or groups
+      case 'group': {
+        // @ts-ignore
+        return this.sessionModel.query();
+      }
+      case 'agent': {
+        // @ts-ignore
+        return this.sessionModel.query();
+      }
+
+      case 'all': {
+        // @ts-ignore
+        return this.sessionModel.query();
+      }
+    }
+  }
+
+  async countSessions() {
+    return this.sessionModel.count();
+  }
+
+  async searchSessions(keyword: string) {
+    return this.sessionModel.queryByKeyword(keyword);
+  }
+
+  async updateSession(id: string, value: Partial<UpdateSessionParams>) {
+    return this.sessionModel.update(id, {
+      ...value,
+      groupId: value.group === 'default' ? null : value.group,
+    });
+  }
+
+  async updateSessionConfig(
+    activeId: string,
+    config: DeepPartial<LobeAgentConfig>,
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    _?: AbortSignal,
+  ) {
+    const session = await this.sessionModel.findByIdOrSlug(activeId);
+    if (!session || !config) return;
+
+    return this.sessionModel.updateConfig(session.agent.id, config as AgentItem);
+  }
+
+  async updateSessionMeta(
+    activeId: string,
+    meta: Partial<MetaData>,
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    _?: AbortSignal,
+  ) {
+    // the inbox session does not allow modifying its meta
+    if (activeId === INBOX_SESSION_ID) return;
+
+    return this.sessionModel.update(activeId, meta);
+  }
+
+  async updateSessionChatConfig(
+    activeId: string,
+    config: DeepPartial<LobeAgentChatConfig>,
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    _?: AbortSignal,
+  ) {
+    return this.updateSessionConfig(activeId, { chatConfig: config });
+  }
+
+  async removeSession(id: string) {
+    return this.sessionModel.delete(id);
+  }
+
+  async removeAllSessions() {
+    return this.sessionModel.deleteAll();
+  }
+
+  // ************************************** //
+  // *********** SessionGroup *********** //
+  // ************************************** //
+
+  async createSessionGroup(name: string, sort?: number) {
+    const item = await this.sessionGroupModel.create({ name, sort });
+    if (!item) {
+      throw new Error('session group create Error');
+    }
+
+    return item.id;
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  async batchCreateSessionGroups(_groups: SessionGroups) {
+    return { added: 0, ids: [], skips: [], success: true };
+  }
+
+  async removeSessionGroup(id: string) {
+    return await this.sessionGroupModel.delete(id);
+  }
+
+  async updateSessionGroup(id: string, data: Partial<SessionGroupItem>) {
+    return this.sessionGroupModel.update(id, data);
+  }
+
+  async updateSessionGroupOrder(sortMap: { id: string; sort: number }[]) {
+    return this.sessionGroupModel.updateOrder(sortMap);
+  }
+
+  async getSessionGroups(): Promise<SessionGroupItem[]> {
+    return this.sessionGroupModel.query();
+  }
+
+  async removeSessionGroups() {
+    return this.sessionGroupModel.deleteAll();
+  }
+}
diff --git a/src/services/session/type.ts b/src/services/session/type.ts
index f51f2f39780d..59528fe138ca 100644
--- a/src/services/session/type.ts
+++ b/src/services/session/type.ts
@@ -16,13 +16,21 @@ import {
 export interface ISessionService {
   createSession(type: LobeSessionType, defaultValue: Partial<LobeAgentSession>): Promise<string>;
+
+  /**
+   * To be deprecated
+   * @deprecated
+   */
   batchCreateSessions(importSessions: LobeSessions): Promise;
  cloneSession(id: string, newTitle: string): Promise<string | undefined>;
  getGroupedSessions(): Promise<ChatSessionList>;
+
+  /**
+   * @deprecated
+   */
  getSessionsByType(type: 'agent' | 'group' | 'all'): Promise<LobeSessions>;
  countSessions(): Promise<number>;
-  hasSessions(): Promise<boolean>;
  searchSessions(keyword: string): Promise<LobeSessions>;
  updateSession(
@@ -53,6 +61,11 @@ export interface ISessionService {
  // ************************************** //
  createSessionGroup(name: string, sort?: number): Promise<string>;
+
+  /**
+   * To be deprecated
+   * @deprecated
+   */
  batchCreateSessionGroups(groups: SessionGroups): Promise;
  getSessionGroups(): Promise<SessionGroupItem[]>;
diff --git a/src/services/topic/index.ts b/src/services/topic/index.ts
index 360656149ea5..1e1847a18344 100644
--- a/src/services/topic/index.ts
+++ b/src/services/topic/index.ts
@@ -1,6 +1,9 @@
-
-import { ClientService } from './client';
+import { ClientService as DeprecatedService } from './client';
+import { ClientService } from './pglite';
 import { ServerService } from './server';
+const clientService =
+  process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService();
+
 export const topicService =
-  process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService();
+  process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : clientService;
diff --git a/src/services/topic/pglite.test.ts b/src/services/topic/pglite.test.ts
new file mode 100644
index 000000000000..c900de550443
--- /dev/null
+++ b/src/services/topic/pglite.test.ts
@@ -0,0 +1,212 @@
+import { eq } from 'drizzle-orm';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { clientDB, initializeDB } from '@/database/client/db';
+import { sessions, topics, users } from '@/database/schemas';
+import { ChatTopic } from '@/types/topic';
+
+import { ClientService } from './pglite';
+
+// Mock data
+const userId = 'topic-user-test';
+const sessionId = 'topic-session';
+const mockTopicId = 'mock-topic-id';
+
+const mockTopic = {
+  id: mockTopicId,
+  title: 'Mock Topic',
+};
+
+const topicService = new ClientService(userId);
+
+beforeEach(async () => {
+  await initializeDB();
+
+  await clientDB.delete(users);
+
+  // create test data
+  await clientDB.transaction(async (tx) => {
+    await tx.insert(users).values({ id: userId });
+    await tx.insert(sessions).values({ id: sessionId, userId });
+    await tx.insert(topics).values({ ...mockTopic, sessionId, userId });
+  });
+});
+
+describe('TopicService', () => {
+  describe('createTopic', () => {
+    it('should create a topic and return its id', async () => {
+      // Setup
+      const createParams = {
+        title: 'New Topic',
+        sessionId: sessionId,
+      };
+
+      // Execute
+      const topicId = await topicService.createTopic(createParams);
+
+      // Assert
+      expect(topicId).toBeDefined();
+    });
+
+    it('should throw an error if topic creation fails', async () => {
+      // Setup
+      const createParams = {
+        title: 'New Topic',
+        sessionId: 123 as any, // sessionId should be string
+      };
+
+      // Execute & Assert
+      await expect(topicService.createTopic(createParams)).rejects.toThrowError();
+    });
+  });
+
+  describe('getTopics', () => {
+    // Example for getTopics
+    it('should query topics with given parameters', async () => {
+      // Setup
+      const queryParams = { sessionId };
+
+      // Execute
+      const data = await topicService.getTopics(queryParams);
+
+      // Assert
+      expect(data[0]).toMatchObject(mockTopic);
+    });
+  });
+
+  describe('updateTopic', () => {
+    // Example for updateFavorite
+    it('should toggle favorite status of a topic', async () => {
+      // Execute
+      const result = await
topicService.updateTopic(mockTopicId, { favorite: true }); + + // Assert + expect(result[0].favorite).toBeTruthy(); + }); + + it('should update the title of a topic', async () => { + // Setup + const newTitle = 'Updated Topic Title'; + + // Execute + const result = await topicService.updateTopic(mockTopicId, { title: newTitle }); + + // Assert + expect(result[0].title).toEqual(newTitle); + }); + }); + + describe('removeTopic', () => { + it('should remove a topic by id', async () => { + // Execute + await topicService.removeTopic(mockTopicId); + const result = await clientDB.query.topics.findFirst({ where: eq(topics.id, mockTopicId) }); + + // Assert + expect(result).toBeUndefined(); + }); + }); + + describe('removeTopics', () => { + it('should remove all topics with a given session id', async () => { + // Setup + const sessionId = 'session-id'; + + // Execute + await topicService.removeTopics(sessionId); + const result = await clientDB.query.topics.findMany({ + where: eq(topics.sessionId, sessionId), + }); + + expect(result.length).toEqual(0); + }); + }); + + describe('batchRemoveTopics', () => { + it('should batch remove topics', async () => { + await clientDB.insert(topics).values([{ id: 'topic-id-1', title: 'topic-title', userId }]); + // Setup + const topicIds = [mockTopicId, 'another-topic-id']; + + // Execute + await topicService.batchRemoveTopics(topicIds); + + const count = await clientDB.$count(topics); + + // Assert + expect(count).toBe(1); + }); + }); + + describe('removeAllTopic', () => { + it('should clear all topics from the table', async () => { + // Execute + await topicService.removeAllTopic(); + + const count = await clientDB.$count(topics); + // Assert + expect(count).toBe(0); + }); + }); + + describe('batchCreateTopics', () => { + it('should batch create topics', async () => { + // Execute + const result = await topicService.batchCreateTopics([ + { id: 'topic-id-1', title: 'topic-title' }, + { id: 'topic-id-2', title: 'topic-title' }, + ] as ChatTopic[]); + + // Assert + expect(result.success).toBeTruthy(); + expect(result.added).toBe(2); + }); + }); + + describe('getAllTopics', () => { + it('should retrieve all topics', async () => { + await clientDB.insert(topics).values([ + { id: 'topic-id-1', title: 'topic-title', userId }, + { id: 'topic-id-2', title: 'topic-title', userId }, + ]); + // Execute + const result = await topicService.getAllTopics(); + + // Assert + expect(result.length).toEqual(3); + }); + }); + + describe('searchTopics', () => { + it('should return all topics that match the keyword', async () => { + // Setup + const keyword = 'Topic'; + + // Execute + const result = await topicService.searchTopics(keyword, sessionId); + + // Assert + expect(result.length).toEqual(1); + }); + it('should return empty topic if not match the keyword', async () => { + // Setup + const keyword = 'search'; + + // Execute + const result = await topicService.searchTopics(keyword, sessionId); + + // Assert + expect(result.length).toEqual(0); + }); + }); + + describe('countTopics', () => { + it('should return topic counts', async () => { + // Execute + const result = await topicService.countTopics(); + + // Assert + expect(result).toBe(1); + }); + }); +}); diff --git a/src/services/topic/pglite.ts b/src/services/topic/pglite.ts new file mode 100644 index 000000000000..542eafe9ff2c --- /dev/null +++ b/src/services/topic/pglite.ts @@ -0,0 +1,85 @@ +import { INBOX_SESSION_ID } from '@/const/session'; +import { clientDB } from '@/database/client/db'; +import { TopicModel } from 
'@/database/server/models/topic'; +import { BaseClientService } from '@/services/baseClientService'; +import { ChatTopic } from '@/types/topic'; + +import { CreateTopicParams, ITopicService, QueryTopicParams } from './type'; + +export class ClientService extends BaseClientService implements ITopicService { + private get topicModel(): TopicModel { + return new TopicModel(clientDB as any, this.userId); + } + + async createTopic(params: CreateTopicParams): Promise { + const item = await this.topicModel.create({ + ...params, + sessionId: this.toDbSessionId(params.sessionId), + } as any); + + if (!item) { + throw new Error('topic create Error'); + } + + return item.id; + } + + async batchCreateTopics(importTopics: ChatTopic[]) { + const data = await this.topicModel.batchCreate(importTopics as any); + + return { added: data.length, ids: [], skips: [], success: true }; + } + + async cloneTopic(id: string, newTitle?: string) { + const data = await this.topicModel.duplicate(id, newTitle); + return data.topic.id; + } + + async getTopics(params: QueryTopicParams) { + const data = await this.topicModel.query({ + ...params, + sessionId: this.toDbSessionId(params.sessionId), + }); + return data as unknown as Promise; + } + + async searchTopics(keyword: string, sessionId?: string) { + const data = await this.topicModel.queryByKeyword(keyword, this.toDbSessionId(sessionId)); + + return data as unknown as Promise; + } + + async getAllTopics() { + const data = await this.topicModel.queryAll(); + + return data as unknown as Promise; + } + + async countTopics() { + return this.topicModel.count(); + } + + async updateTopic(id: string, data: Partial) { + return this.topicModel.update(id, data as any); + } + + async removeTopic(id: string) { + return this.topicModel.delete(id); + } + + async removeTopics(sessionId: string) { + return this.topicModel.batchDeleteBySessionId(this.toDbSessionId(sessionId)); + } + + async batchRemoveTopics(topics: string[]) { + return this.topicModel.batchDelete(topics); + } + + async removeAllTopic() { + return this.topicModel.deleteAll(); + } + + private toDbSessionId(sessionId?: string | null) { + return sessionId === INBOX_SESSION_ID ? null : sessionId; + } +} diff --git a/src/services/user/client.test.ts b/src/services/user/client.test.ts index f79f8294a2f5..5869d58d54ee 100644 --- a/src/services/user/client.test.ts +++ b/src/services/user/client.test.ts @@ -4,7 +4,6 @@ import { Mock, beforeEach, describe, expect, it, vi } from 'vitest'; import { UserModel } from '@/database/_deprecated/models/user'; import { UserPreference } from '@/types/user'; import { UserSettings } from '@/types/user/settings'; -import { AsyncLocalStorage } from '@/utils/localStorage'; import { ClientService } from './client'; diff --git a/src/services/user/index.ts b/src/services/user/index.ts index e472bdaef2c0..a7d3b15df07b 100644 --- a/src/services/user/index.ts +++ b/src/services/user/index.ts @@ -1,5 +1,11 @@ -import { ClientService } from './client'; +import { ClientService as DeprecatedService } from './client'; +import { ClientService } from './pglite'; import { ServerService } from './server'; +const clientService = + process.env.NEXT_PUBLIC_CLIENT_DB === 'pglite' ? new ClientService() : new DeprecatedService(); + export const userService = - process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? new ServerService() : new ClientService(); + process.env.NEXT_PUBLIC_SERVICE_MODE === 'server' ? 
new ServerService() : clientService; + +export const userClientService = clientService; diff --git a/src/services/user/pglite.test.ts b/src/services/user/pglite.test.ts new file mode 100644 index 000000000000..cab8a0741d03 --- /dev/null +++ b/src/services/user/pglite.test.ts @@ -0,0 +1,98 @@ +import { eq } from 'drizzle-orm'; +import { DeepPartial } from 'utility-types'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; + +import { clientDB, initializeDB } from '@/database/client/db'; +import { userSettings, users } from '@/database/schemas'; +import { UserPreference } from '@/types/user'; +import { UserSettings } from '@/types/user/settings'; + +import { ClientService } from './pglite'; + +const mockUser = { + avatar: 'avatar.png', + settings: { themeMode: 'light' } as unknown as UserSettings, + uuid: 'user-id', +}; + +const mockPreference = { + useCmdEnterToSend: true, +} as UserPreference; +const clientService = new ClientService(mockUser.uuid); + +beforeEach(async () => { + vi.clearAllMocks(); + + await initializeDB(); + await clientDB.delete(users); + + await clientDB.insert(users).values({ id: mockUser.uuid, avatar: 'avatar.png' }); + await clientDB + .insert(userSettings) + .values({ id: mockUser.uuid, general: { themeMode: 'light' } }); +}); + +describe('ClientService', () => { + it('should get user state correctly', async () => { + const spyOn = vi + .spyOn(clientService['preferenceStorage'], 'getFromLocalStorage') + .mockResolvedValue(mockPreference); + + const userState = await clientService.getUserState(); + + expect(userState).toMatchObject({ + avatar: mockUser.avatar, + isOnboard: true, + canEnablePWAGuide: false, + hasConversation: false, + canEnableTrace: false, + preference: mockPreference, + settings: { general: { themeMode: 'light' } }, + userId: mockUser.uuid, + }); + expect(spyOn).toHaveBeenCalledTimes(1); + }); + + it('should update user settings correctly', async () => { + const settingsPatch: DeepPartial = { general: { themeMode: 'dark' } }; + + await clientService.updateUserSettings(settingsPatch); + + const result = await clientDB.query.userSettings.findFirst({ + where: eq(userSettings.id, mockUser.uuid), + }); + + expect(result).toMatchObject(settingsPatch); + }); + + it('should reset user settings correctly', async () => { + await clientService.resetUserSettings(); + + const result = await clientDB.query.userSettings.findFirst({ + where: eq(userSettings.id, mockUser.uuid), + }); + + expect(result).toBeUndefined(); + }); + + it('should update user avatar correctly', async () => { + const newAvatar = 'new-avatar.png'; + + await clientService.updateAvatar(newAvatar); + }); + + it('should update user preference correctly', async () => { + const newPreference = { + useCmdEnterToSend: false, + } as UserPreference; + + const spyOn = vi + .spyOn(clientService['preferenceStorage'], 'saveToLocalStorage') + .mockResolvedValue(undefined); + + await clientService.updatePreference(newPreference); + + expect(spyOn).toHaveBeenCalledWith(newPreference); + expect(spyOn).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/services/user/pglite.ts b/src/services/user/pglite.ts new file mode 100644 index 000000000000..b28656433833 --- /dev/null +++ b/src/services/user/pglite.ts @@ -0,0 +1,92 @@ +import { DeepPartial } from 'utility-types'; + +import { clientDB } from '@/database/client/db'; +import { users } from '@/database/schemas'; +import { MessageModel } from '@/database/server/models/message'; +import { SessionModel } from 
'@/database/server/models/session';
+import { UserModel } from '@/database/server/models/user';
+import { BaseClientService } from '@/services/baseClientService';
+import { UserGuide, UserInitializationState, UserPreference } from '@/types/user';
+import { UserSettings } from '@/types/user/settings';
+import { AsyncLocalStorage } from '@/utils/localStorage';
+
+import { IUserService } from './type';
+
+export class ClientService extends BaseClientService implements IUserService {
+  private preferenceStorage: AsyncLocalStorage<UserPreference>;
+
+  private get userModel(): UserModel {
+    return new UserModel(clientDB as any, this.userId);
+  }
+  private get messageModel(): MessageModel {
+    return new MessageModel(clientDB as any, this.userId);
+  }
+  private get sessionModel(): SessionModel {
+    return new SessionModel(clientDB as any, this.userId);
+  }
+
+  constructor(userId?: string) {
+    super(userId);
+    this.preferenceStorage = new AsyncLocalStorage('LOBE_PREFERENCE');
+  }
+
+  async getUserState(): Promise<UserInitializationState> {
+    // if the user does not exist in the db, create one to make sure a user exists
+    await this.makeSureUserExist();
+
+    const state = await this.userModel.getUserState((encryptKeyVaultsStr) =>
+      encryptKeyVaultsStr ? JSON.parse(encryptKeyVaultsStr) : {},
+    );
+
+    const user = await UserModel.findById(clientDB as any, this.userId);
+    const messageCount = await this.messageModel.count();
+    const sessionCount = await this.sessionModel.count();
+
+    return {
+      ...state,
+      avatar: user?.avatar as string,
+      canEnablePWAGuide: messageCount >= 4,
+      canEnableTrace: messageCount >= 4,
+      hasConversation: messageCount > 0 || sessionCount > 0,
+      isOnboard: true,
+      preference: await this.preferenceStorage.getFromLocalStorage(),
+    };
+  }
+
+  updateUserSettings = async (value: DeepPartial<UserSettings>) => {
+    const { keyVaults, ...res } = value;
+
+    return this.userModel.updateSetting({ ...res, keyVaults: JSON.stringify(keyVaults) });
+  };
+
+  resetUserSettings = async () => {
+    return this.userModel.deleteSetting();
+  };
+
+  async updateAvatar(avatar: string) {
+    await this.userModel.updateUser({ avatar });
+  }
+
+  async updatePreference(preference: Partial<UserPreference>) {
+    await this.preferenceStorage.saveToLocalStorage(preference);
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars,unused-imports/no-unused-vars
+  async updateGuide(guide: Partial<UserGuide>) {
+    throw new Error('Method not implemented.');
+  }
+
+  async makeSureUserExist() {
+    const existUsers = await clientDB.query.users.findMany();
+
+    let user: { id: string };
+    if (existUsers.length === 0) {
+      const result = await clientDB.insert(users).values({ id: this.userId }).returning();
+      user = result[0];
+    } else {
+      user = existUsers[0];
+    }
+
+    return user;
+  }
+}
diff --git a/src/store/global/actions/clientDb.ts b/src/store/global/actions/clientDb.ts
new file mode 100644
index 000000000000..ed89041d5a54
--- /dev/null
+++ b/src/store/global/actions/clientDb.ts
@@ -0,0 +1,51 @@
+import { SWRResponse } from 'swr';
+import type { StateCreator } from 'zustand/vanilla';
+
+import { useOnlyFetchOnceSWR } from '@/libs/swr';
+import type { GlobalStore } from '@/store/global';
+import { DatabaseLoadingState, OnStageChange } from '@/types/clientDB';
+
+type InitClientDBParams = { onStateChange: OnStageChange };
+/**
+ * Settings actions
+ */
+export interface GlobalClientDBAction {
+  initializeClientDB: (params?: InitClientDBParams) => Promise<void>;
+  markPgliteEnabled: () => void;
+  useInitClientDB: (params?: InitClientDBParams) => SWRResponse;
+}
+
+export const clientDBSlice: StateCreator<
+  GlobalStore,
+  [['zustand/devtools', never]],
+  [],
+  GlobalClientDBAction
+> = (set, get) => ({
+  initializeClientDB: async (params) => {
+    // if the db initialization has already started and is not in an error state, just skip
+    if (
+      get().initClientDBStage !== DatabaseLoadingState.Idle &&
+      get().initClientDBStage !== DatabaseLoadingState.Error
+    )
+      return;
+
+    const { initializeDB } = await import('@/database/client/db');
+    await initializeDB({
+      onError: (error) => {
+        set({ initClientDBError: error });
+      },
+      onProgress: (data) => {
+        set({ initClientDBProcess: data });
+      },
+      onStateChange: (state) => {
+        set({ initClientDBStage: state });
+        params?.onStateChange?.(state);
+      },
+    });
+  },
+  markPgliteEnabled: () => {
+    get().updateSystemStatus({ isEnablePglite: true });
+  },
+  useInitClientDB: (params) =>
+    useOnlyFetchOnceSWR('initClientDB', () => get().initializeClientDB(params)),
+});
diff --git a/src/store/global/initialState.ts b/src/store/global/initialState.ts
index 5da29090f9a3..681f69950b1c 100644
--- a/src/store/global/initialState.ts
+++ b/src/store/global/initialState.ts
@@ -1,5 +1,6 @@
 import { AppRouterInstance } from 'next/dist/shared/lib/app-router-context.shared-runtime';
+import { DatabaseLoadingState } from '@/types/clientDB';
 import { SessionDefaultGroup } from '@/types/session';
 import { AsyncLocalStorage } from '@/utils/localStorage';
@@ -37,6 +38,10 @@ export interface SystemStatus {
   hidePWAInstaller?: boolean;
   hideThreadLimitAlert?: boolean;
   inputHeight: number;
+  /**
+   * PGLite is not enabled when the app initializes; it is only enabled after the user turns it on manually
+   */
+  isEnablePglite?: boolean;
   mobileShowPortal?: boolean;
   mobileShowTopic?: boolean;
   sessionsWidth: number;
@@ -50,6 +55,13 @@ export interface SystemStatus {
 export interface GlobalState {
   hasNewVersion?: boolean;
+  initClientDBError?: Error;
+  initClientDBProcess?: { costTime?: number; phase: 'wasm' | 'dependencies'; progress: number };
+  /**
+   * Client database initialization state:
+   * Idle on startup, Ready when finished, Error on failure
+   */
+  initClientDBStage: DatabaseLoadingState;
   isMobile?: boolean;
   isStatusInit?: boolean;
   latestVersion?: string;
@@ -76,6 +88,7 @@ export const INITIAL_STATUS = {
 } satisfies SystemStatus;
 export const initialState: GlobalState = {
+  initClientDBStage: DatabaseLoadingState.Idle,
   isMobile: false,
   isStatusInit: false,
   sidebarKey: SidebarTabKey.Chat,
diff --git a/src/store/global/selectors.ts b/src/store/global/selectors.ts
index 65936ca26bb7..f75087dfab32 100644
--- a/src/store/global/selectors.ts
+++ b/src/store/global/selectors.ts
@@ -1,4 +1,6 @@
+import { isServerMode, isUsePgliteDB } from '@/const/version';
 import { GlobalStore } from '@/store/global';
+import { DatabaseLoadingState } from '@/types/clientDB';
 import { INITIAL_STATUS } from './initialState';
@@ -22,17 +24,36 @@ const filePanelWidth = (s: GlobalStore) => s.status.filePanelWidth;
 const inputHeight = (s: GlobalStore) => s.status.inputHeight;
 const threadInputHeight = (s: GlobalStore) => s.status.threadInputHeight;
-const isPgliteNotEnabled = () => false;
+const isPgliteNotEnabled = (s: GlobalStore) =>
+  isUsePgliteDB && !isServerMode && s.isStatusInit && !s.status.isEnablePglite;
-const isPgliteNotInited = () => false;
+/**
+ * Returns true if and only if we are in client db mode and pglite has not finished initializing
+ */
+const isPgliteNotInited = (s: GlobalStore) =>
+  isUsePgliteDB &&
+  s.isStatusInit &&
+  s.status.isEnablePglite &&
+  s.initClientDBStage !== DatabaseLoadingState.Ready;
-const isPgliteInited = (): boolean => true;
+/**
+ * Returns true if and only if we are in client db mode and pglite has finished initializing
+ */
+const isPgliteInited = (s: GlobalStore): boolean =>
+  (s.isStatusInit &&
+    s.status.isEnablePglite &&
+    s.initClientDBStage === DatabaseLoadingState.Ready) ||
+  false;
+
+// This flag indicates whether the client db has finished initializing; normally it only changes in pglite DB mode and stays true otherwise
+const isDBInited = (s: GlobalStore): boolean => (isUsePgliteDB ? isPgliteInited(s) : true);
 export const systemStatusSelectors = {
   filePanelWidth,
   hidePWAInstaller,
   inZenMode,
   inputHeight,
+  isDBInited,
   isPgliteInited,
   isPgliteNotEnabled,
   isPgliteNotInited,
diff --git a/src/store/global/store.ts b/src/store/global/store.ts
index 5bf1ac439f19..ed3f28e9389a 100644
--- a/src/store/global/store.ts
+++ b/src/store/global/store.ts
@@ -5,15 +5,17 @@ import { StateCreator } from 'zustand/vanilla';
 import { createDevtools } from '../middleware/createDevtools';
 import { type GlobalStoreAction, globalActionSlice } from './action';
+import { type GlobalClientDBAction, clientDBSlice } from './actions/clientDb';
 import { type GlobalState, initialState } from './initialState';
 // =============== aggregate createStoreFn ============ //
-export type GlobalStore = GlobalState & GlobalStoreAction;
+export type GlobalStore = GlobalState & GlobalStoreAction & GlobalClientDBAction;
 const createStore: StateCreator<GlobalStore, [['zustand/devtools', never]]> = (...parameters) => ({
   ...initialState,
   ...globalActionSlice(...parameters),
+  ...clientDBSlice(...parameters),
 });
 // =============== implement useStore ============ //
diff --git a/src/store/user/slices/common/action.ts b/src/store/user/slices/common/action.ts
index 5f523d8d7dde..ad8487ed3770 100644
--- a/src/store/user/slices/common/action.ts
+++ b/src/store/user/slices/common/action.ts
@@ -46,11 +46,9 @@ export const createCommonSlice: StateCreator<
     await mutate(GET_USER_STATE_KEY);
   },
   updateAvatar: async (avatar) => {
-    const { ClientService } = await import('@/services/user/client');
+    const { userClientService } = await import('@/services/user');
-    const clientService = new ClientService();
-
-    await clientService.updateAvatar(avatar);
+    await userClientService.updateAvatar(avatar);
     await get().refreshUserState();
   },
{item.title}
{item.desc}
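
Editor's note, appended after the patch: the layouts touched earlier mount an InitClientDB feature whose implementation is not included in this diff. The following is a minimal, hypothetical sketch of how the pieces added above (clientDBSlice, systemStatusSelectors.isDBInited, DatabaseLoadingState) could be consumed; it assumes `useGlobalStore` is the hook exported from '@/store/global', and the component name and markup are illustrative only, not part of the patch.

'use client';

import { PropsWithChildren, memo } from 'react';

import { useGlobalStore } from '@/store/global'; // assumed export, not shown in this diff
import { systemStatusSelectors } from '@/store/global/selectors';
import { DatabaseLoadingState } from '@/types/clientDB';

// Hypothetical consumer of the new clientDBSlice; the real InitClientDB feature may differ.
const ClientDBGate = memo<PropsWithChildren>(({ children }) => {
  // Kick off PGlite initialization; useOnlyFetchOnceSWR inside the slice ensures it runs once.
  useGlobalStore((s) => s.useInitClientDB)({
    onStateChange: (state) => {
      if (state === DatabaseLoadingState.Ready) console.info('client DB ready');
    },
  });

  const isDBInited = useGlobalStore(systemStatusSelectors.isDBInited);
  const error = useGlobalStore((s) => s.initClientDBError);

  if (error) return <div>Failed to initialize the local database: {error.message}</div>;
  if (!isDBInited) return <div>Preparing the local database...</div>;

  return <>{children}</>;
});

export default ClientDBGate;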