3 changes: 3 additions & 0 deletions electron.vite.config.ts
@@ -83,6 +83,9 @@ export default defineConfig({
       svgLoader(),
       vueDevTools()
     ],
+    worker: {
+      format: 'es'
+    },
     build: {
       minify: 'esbuild',
       // Ensure CSS order in build matches import order in dev
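Note: the `worker.format: 'es'` option makes the bundler emit web workers as ES modules instead of the default IIFE output. A minimal usage sketch of how such a worker would then be created on the renderer side (the worker file name below is hypothetical, purely for illustration):

    // Sketch: instantiating a bundled worker when worker.format is 'es'.
    // The file ollamaModels.worker.ts is an assumed example, not part of this PR.
    const worker = new Worker(new URL('./ollamaModels.worker.ts', import.meta.url), {
      type: 'module' // module workers are required for ES-format worker output
    })
    worker.postMessage({ action: 'refresh' })
    worker.onmessage = (event: MessageEvent) => {
      console.log('worker result:', event.data)
    }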
8 changes: 0 additions & 8 deletions src/main/presenter/llmProviderPresenter/index.ts
@@ -1674,14 +1674,6 @@ export class LLMProviderPresenter implements ILlmProviderPresenter {
       })
     })
   }
-  deleteOllamaModel(modelName: string): Promise<boolean> {
-    const provider = this.getOllamaProviderInstance()
-    if (!provider) {
-      throw new Error('Ollama provider not found')
-    }
-    return provider.deleteModel(modelName)
-  }
-
   /**
    * Get the embedding representation of a text
    * @param providerId Provider ID
@@ -380,18 +380,6 @@ export class OllamaProvider extends BaseLLMProvider {
     }
   }

-  public async deleteModel(modelName: string): Promise<boolean> {
-    try {
-      await this.ollama.delete({
-        model: modelName
-      })
-      return true
-    } catch (error) {
-      console.error(`Failed to delete Ollama model ${modelName}:`, (error as Error).message)
-      return false
-    }
-  }
-
   public async showModelInfo(modelName: string): Promise<ShowResponse> {
     try {
       const response = await this.ollama.show({
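For reference, the removed helper was a thin wrapper around the ollama client's delete call. If the capability is ever needed outside the provider, the same client API is still available; a sketch only, not part of this PR (the constructor-with-host form and the default host value are assumptions, not taken from this diff):

    // Sketch: calling the ollama client's delete API directly
    // (the same call the removed deleteModel helper wrapped).
    import { Ollama } from 'ollama'

    async function deleteLocalModel(modelName: string): Promise<boolean> {
      const ollama = new Ollama({ host: 'http://127.0.0.1:11434' }) // assumed default Ollama host
      try {
        await ollama.delete({ model: modelName })
        return true
      } catch (error) {
        console.error(`Failed to delete Ollama model ${modelName}:`, (error as Error).message)
        return false
      }
    }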
61 changes: 0 additions & 61 deletions src/renderer/settings/components/OllamaProviderSettingsDetail.vue
@@ -131,7 +131,6 @@
             :model-name="model.name"
             :model-id="model.meta?.id ?? model.name"
             :provider-id="provider.id"
-            :is-custom-model="true"
             :type="model.type"
             :enabled="model.enabled"
             :vision="model.vision"
@@ -140,7 +139,6 @@
             :enable-search="model.enableSearch"
             @enabled-change="handleModelEnabledChange(model.name, $event)"
             @config-changed="refreshModels"
-            @delete-model="showDeleteModelConfirm(model.name)"
           />
         </template>
         <template v-else>
@@ -208,26 +206,6 @@
       </DialogContent>
     </Dialog>

-    <!-- Delete model confirmation dialog -->
-    <Dialog v-model:open="showDeleteModelDialog">
-      <DialogContent>
-        <DialogHeader>
-          <DialogTitle>{{ t('settings.provider.dialog.deleteModel.title') }}</DialogTitle>
-          <DialogDescription>
-            {{ t('settings.provider.dialog.deleteModel.content', { name: modelToDelete }) }}
-          </DialogDescription>
-        </DialogHeader>
-        <DialogFooter>
-          <Button variant="outline" @click="showDeleteModelDialog = false">
-            {{ t('dialog.cancel') }}
-          </Button>
-          <Button variant="destructive" @click="confirmDeleteModel">
-            {{ t('settings.provider.dialog.deleteModel.confirm') }}
-          </Button>
-        </DialogFooter>
-      </DialogContent>
-    </Dialog>
-
    <!-- Check model dialog -->
    <Dialog v-model:open="showCheckModelDialog">
      <DialogContent>
@@ -281,11 +259,9 @@ import { useSettingsStore } from '@/stores/settings'
 import { useModelCheckStore } from '@/stores/modelCheck'
 import type { LLM_PROVIDER, RENDERER_MODEL_META } from '@shared/presenter'
 import ModelConfigItem from '@/components/settings/ModelConfigItem.vue'
-import { useToast } from '@/components/use-toast'
 import { ModelType } from '@shared/model'

 const { t } = useI18n()
-const { toast } = useToast()

 const props = defineProps<{
   provider: LLM_PROVIDER
@@ -297,8 +273,6 @@ const apiHost = ref(props.provider.baseUrl || '')
 const apiKey = ref(props.provider.apiKey || '')
 const showApiKey = ref(false)
 const showPullModelDialog = ref(false)
-const showDeleteModelDialog = ref(false)
-const modelToDelete = ref('')
 const showCheckModelDialog = ref(false)
 const checkResult = ref<boolean>(false)

@@ -856,21 +830,6 @@ const pullModel = async (modelName: string) => {
   }
 }

-// Show the delete model confirmation dialog
-const showDeleteModelConfirm = (modelName: string) => {
-  if (isModelRunning(modelName)) {
-    toast({
-      title: t('settings.provider.toast.modelRunning'),
-      description: t('settings.provider.toast.modelRunningDesc', { model: modelName }),
-      variant: 'destructive',
-      duration: 3000
-    })
-    return
-  }
-  modelToDelete.value = modelName
-  showDeleteModelDialog.value = true
-}
-
const handleModelEnabledChange = async (modelName: string, enabled: boolean) => {
  try {
    await settingsStore.updateModelStatus(props.provider.id, modelName, enabled)
@@ -879,22 +838,6 @@ const handleModelEnabledChange = async (modelName: string, enabled: boolean) =>
  }
}

-// Confirm model deletion - uses the settings store
-const confirmDeleteModel = async () => {
-  if (!modelToDelete.value) return
-
-  try {
-    const success = await settingsStore.deleteOllamaModel(modelToDelete.value)
-    if (success) {
-      // The model list refreshes automatically after a successful deletion; no extra refreshModels call is needed
-    }
-    showDeleteModelDialog.value = false
-    modelToDelete.value = ''
-  } catch (error) {
-    console.error(`Failed to delete model ${modelToDelete.value}:`, error)
-  }
-}
-
// Utility functions
const formatModelSize = (sizeInBytes: number): string => {
  if (!sizeInBytes) return ''
@@ -914,10 +857,6 @@ const formatModelSize = (sizeInBytes: number): string => {
}

// Helper functions backed by the settings store
-const isModelRunning = (modelName: string): boolean => {
-  return settingsStore.isOllamaModelRunning(modelName)
-}
-
const isModelLocal = (modelName: string): boolean => {
  return settingsStore.isOllamaModelLocal(modelName)
}
17 changes: 0 additions & 17 deletions src/renderer/src/stores/settings.ts
@@ -1335,22 +1335,6 @@ export const useSettingsStore = defineStore('settings', () => {
    }
  }

-  /**
-   * Delete an Ollama model
-   */
-  const deleteOllamaModel = async (modelName: string): Promise<boolean> => {
-    try {
-      const success = await llmP.deleteOllamaModel(modelName)
-      if (success) {
-        await refreshOllamaModels()
-      }
-      return success
-    } catch (error) {
-      console.error(`Failed to delete Ollama model ${modelName}:`, error)
-      return false
-    }
-  }
-
  /**
   * Handle Ollama model pull events
   */
@@ -1792,7 +1776,6 @@ export const useSettingsStore = defineStore('settings', () => {
    ollamaPullingModels,
    refreshOllamaModels,
    pullOllamaModel,
-    deleteOllamaModel,
    isOllamaModelRunning,
    isOllamaModelLocal,
    getOllamaPullingModels,
1 change: 0 additions & 1 deletion src/shared/types/presenters/legacy.presenters.d.ts
@@ -682,7 +682,6 @@ export interface ILlmProviderPresenter {
  showOllamaModelInfo(modelName: string): Promise<ShowResponse>
  listOllamaRunningModels(): Promise<OllamaModel[]>
  pullOllamaModels(modelName: string): Promise<boolean>
-  deleteOllamaModel(modelName: string): Promise<boolean>
  getEmbeddings(providerId: string, modelId: string, texts: string[]): Promise<number[][]>
  getDimensions(
    providerId: string,
1 change: 0 additions & 1 deletion src/shared/types/presenters/llmprovider.presenter.d.ts
@@ -178,7 +178,6 @@ export interface ILlmProviderPresenter {
  showOllamaModelInfo(modelName: string): Promise<ShowResponse>
  listOllamaRunningModels(): Promise<OllamaModel[]>
  pullOllamaModels(modelName: string): Promise<boolean>
-  deleteOllamaModel(modelName: string): Promise<boolean>
  getEmbeddings(providerId: string, modelId: string, texts: string[]): Promise<number[][]>
  getDimensions(
    providerId: string,