Commit 43e4367

add del button
1 parent ae4cbf3 commit 43e4367

File tree

1 file changed (+64 additions, -10 deletions)

src/Chat.js

Lines changed: 64 additions & 10 deletions
@@ -1,4 +1,4 @@
-import { Code, Github, MinimizeIcon, Send, Upload } from "lucide-react";
+import { Code, Github, XCircle, Database, MinimizeIcon, Send, Upload, Trash2 } from "lucide-react";
 import React, { useEffect, useRef, useState } from "react";
 import { IFRAME_TEMPLATE, LLM_HTML_MODEL_CONFIG, LLM_VISION_MODEL_CONFIG } from "./constants/chat.js";
 import { useLLMHtmlGeneration } from "./hooks/useLLMHtmlGeneration.js";
@@ -14,9 +14,12 @@ export function Chat() {
   const [selectedImage, setSelectedImage] = useState(null);
   const [selectedImageURL, setSelectedImageURL] = useState(null);
   const [isCodePanelOpen, setIsCodePanelOpen] = useState(false);
+  const [showWarning, setShowWarning] = useState(true);
   const iframeRef = useRef(null);
   const fileInputRef = useRef(null);
   const [buildStage, setBuildStage] = useState(0);
+  const [hasCache, setHasCache] = useState(false);
+  const [cacheSize, setCacheSize] = useState(0);

   let generateText;
   let isGenerating;
@@ -31,6 +34,36 @@ export function Chat() {
   }
   const [currentMessageId, setCurrentMessageId] = useState(null);

+  const checkCache = async () => {
+    try {
+      const keys = await caches.keys();
+      if (keys.length > 0) {
+        setHasCache(true);
+        const estimate = await navigator.storage.estimate();
+        const usedBytes = estimate.usage || 0;
+        setCacheSize(Math.round(usedBytes / (1024 * 1024)));
+      }
+    } catch (error) {
+      console.error("Failed to check cache:", error);
+    }
+  };
+
+  const clearModelCache = async () => {
+    try {
+      const keys = await caches.keys();
+      await Promise.all(keys.map(key => caches.delete(key)));
+      setHasCache(false);
+      setCacheSize(0);
+      window.location.reload();
+    } catch (error) {
+      console.error("Failed to clear cache:", error);
+    }
+  };
+
+  useEffect(() => {
+    checkCache();
+  }, []);
+
   useEffect(() => {
     if (generatedText) {
       iframeRef.current?.contentWindow?.postMessage(
@@ -67,6 +100,12 @@ export function Chat() {
     e.preventDefault();
     if ((!input.trim() && !selectedImage) || isGenerating) return;

+    if (showWarning) {
+      const proceed = window.confirm("Warning: Using this chat will download AI models larger than 1GB in size. Do you want to continue?");
+      if (!proceed) return;
+      setShowWarning(false);
+    }
+
     const userMessage = {
       role: "user",
       content: input,
@@ -87,15 +126,30 @@ export function Chat() {
     "div",
     { className: "min-h-screen bg-gray-950 text-gray-200" },
     h(
-      "a",
-      {
-        href: "https://github.com/pdufour/llm-coder",
-        target: "_blank",
-        rel: "noopener noreferrer",
-        className: "fixed top-4 right-4 z-20 bg-gray-900/80 backdrop-blur-sm rounded-full px-4 py-2 text-gray-300 hover:text-white hover:bg-gray-800 transition-colors flex items-center gap-2",
-      },
-      h(Github, { className: "w-5 h-5" }),
-      "GitHub Repo"
+      "div",
+      { className: "fixed top-4 right-4 z-20 flex items-center gap-2" },
+      hasCache && h(
+        "button",
+        {
+          onClick: clearModelCache,
+
+          className: "bg-gray-900/80 backdrop-blur-sm rounded-full p-2 text-gray-300 hover:text-white hover:bg-gray-800 transition-colors flex items-center gap-2",
+          title: `Clear cached models (${cacheSize}MB)`
+        },
+        h(XCircle, { className: "w-5 h-5" }),
+        `Delete Cache ${cacheSize}MB`
+      ),
+      h(
+        "a",
+        {
+          href: "https://github.com/pdufour/llm-coder",
+          target: "_blank",
+          rel: "noopener noreferrer",
+          className: "bg-gray-900/80 backdrop-blur-sm rounded-full px-4 py-2 text-gray-300 hover:text-white hover:bg-gray-800 transition-colors flex items-center gap-2",
+        },
+        h(Github, { className: "w-5 h-5" }),
+        "GitHub"
+      )
     ),
     !isGenerating && !messages.length
       ? h(
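
For context, here is a minimal standalone sketch (not part of the commit) of the browser APIs the new checkCache and clearModelCache helpers rely on: the Cache Storage API (caches.keys(), caches.delete()) and navigator.storage.estimate(). Note that estimate() reports usage for the whole origin, not only Cache Storage, so the reported size is an approximation; the function names below are illustrative only.

// Sketch only: report and clear this origin's caches in a browser context.
async function reportCacheUsage() {
  const keys = await caches.keys();                          // names of all caches for this origin
  const { usage = 0 } = await navigator.storage.estimate();  // approximate bytes used by the origin
  return { cacheCount: keys.length, usedMB: Math.round(usage / (1024 * 1024)) };
}

async function deleteAllCaches() {
  const keys = await caches.keys();
  await Promise.all(keys.map((key) => caches.delete(key)));  // each delete resolves to a boolean
}

The commit wires these same calls into React state (hasCache, cacheSize) for the new "Delete Cache" button and reloads the page after clearing, so the models are downloaded again on the next generation.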
