
Commit e8cb0c5

Enhance AI message streaming and error handling
Refactor AI message handling and error management with streaming support.
1 parent f811f15

File tree: 1 file changed (+90 −65 lines)


src/main.js

Lines changed: 90 additions & 65 deletions
@@ -4,6 +4,7 @@ import style from "./style.scss";
 import { StringOutputParser } from "@langchain/core/output_parsers";
 import { ChatPromptTemplate } from "@langchain/core/prompts";
 import { ChatOpenAI } from "@langchain/openai";
+import { CallbackManager } from "langchain/callbacks";
 
 import copy from "copy-to-clipboard";
 import { v4 as uuidv4 } from "uuid";
@@ -400,8 +401,6 @@ class AIAssistant {
     const sendBtn = app.querySelector(".ai-send-btn");
     if (sendBtn) sendBtn.innerHTML = stopIconSvg;
 
-    let aiMessageElement = null;
-
     try {
       const settings = await this.getSettings();
       if (!settings.apiKey || !settings.baseUrl || !settings.model) {
@@ -410,38 +409,75 @@ class AIAssistant {
         return;
       }
 
-      aiMessageElement = this.addMessageToChat("assistant", "", app);
+      const aiMessageElement = this.addMessageToChat("assistant", "", app);
+      let streamedText = "";
 
-      let response = await this.callAI(message, settings);
+      try {
+        const response = await this.callAI(message, settings, {
+          onToken: (token) => {
+            if (token == null) return;
+            streamedText += token;
+            aiMessageElement.innerHTML = this._escapeHtml(streamedText) + '<span class="ai-typing-cursor">|</span>';
+            const chatAreaAfter = app.querySelector("#ai-chat-area");
+            if (chatAreaAfter) {
+              chatAreaAfter.scrollTop = chatAreaAfter.scrollHeight;
+            }
+          },
+          onError: (err) => {
+            const errText = err && err.message ? err.message : String(err);
+            const safe = this._escapeHtml(errText).replace(/\n/g, "<br>");
+            aiMessageElement.innerHTML = `<div class="ai-error">${safe}</div>`;
+            if (aiMessageElement.parentElement) aiMessageElement.parentElement.classList.add("error");
+            this.highlightCode(app);
+          }
+        });
 
-      await this.streamMessage(aiMessageElement, response, app, false);
+        let finalResponse = response;
+        if (typeof finalResponse !== "string") {
+          finalResponse = String(finalResponse || "");
+        }
 
-      const chatAreaAfter = app.querySelector("#ai-chat-area");
-      if (chatAreaAfter) {
-        requestAnimationFrame(() => {
-          chatAreaAfter.scrollTop = chatAreaAfter.scrollHeight;
-        });
-      }
+        const isLikelyError = /rate limit|rate-limit|quota|429|error|timeout|exceeded/i.test(finalResponse);
 
-      if (!this.currentSession) {
-        this.currentSession = {
-          id: uuidv4(),
-          messages: [],
-        };
-        this.sessions.push(this.currentSession);
-      }
+        if (isLikelyError) {
+          const safe = this._escapeHtml(finalResponse).replace(/\n/g, "<br>");
+          aiMessageElement.innerHTML = `<div class="ai-error">${safe}</div>`;
+          if (aiMessageElement.parentElement) aiMessageElement.parentElement.classList.add("error");
+          this.highlightCode(app);
+        } else {
+          aiMessageElement.innerHTML = this.formatAIResponse(finalResponse);
+          this.highlightCode(app);
+        }
 
-      this.currentSession.messages.push({ role: "user", content: message });
-      this.currentSession.messages.push({ role: "assistant", content: response });
+        const chatAreaAfter = app.querySelector("#ai-chat-area");
+        if (chatAreaAfter) {
+          requestAnimationFrame(() => {
+            chatAreaAfter.scrollTop = chatAreaAfter.scrollHeight;
+          });
+        }
 
-      this.saveSession().catch((e) => console.error("saveSession error:", e));
-    } catch (error) {
-      const errMsg = "Error: " + (error && error.message ? error.message : String(error));
-      if (!aiMessageElement) {
-        aiMessageElement = this.addMessageToChat("assistant", "", app);
+        if (!this.currentSession) {
+          this.currentSession = {
+            id: uuidv4(),
+            messages: [],
+          };
+          this.sessions.push(this.currentSession);
+        }
+
+        this.currentSession.messages.push({ role: "user", content: message });
+        this.currentSession.messages.push({ role: "assistant", content: finalResponse });
+
+        this.saveSession().catch((e) => console.error("saveSession error:", e));
+      } catch (aiError) {
+        const errText = aiError && aiError.message ? aiError.message : String(aiError);
+        const safe = this._escapeHtml(errText).replace(/\n/g, "<br>");
+        aiMessageElement.innerHTML = `<div class="ai-error">${safe}</div>`;
+        if (aiMessageElement.parentElement) aiMessageElement.parentElement.classList.add("error");
+        this.highlightCode(app);
       }
-      await this.streamMessage(aiMessageElement, errMsg, app, true);
+    } catch (error) {
       console.error("Error generating response:", error);
+      window.toast("Error generating response: " + (error && error.message ? error.message : error), 3000);
     } finally {
       this.isGenerating = false;
       if (sendBtn) sendBtn.innerHTML = sendIconSvg;
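
For reference, the hunk above replaces the old streamMessage typewriter loop with live streaming: callAI now takes an optional third argument of incremental handlers (onToken, onError, and onEnd in the method itself). A minimal caller sketch, assuming an AIAssistant instance named assistant and a populated settings object (both stand-in names, not part of this diff):

// Sketch only: "assistant" and "settings" are assumed stand-ins for an
// AIAssistant instance and its saved API configuration.
const reply = await assistant.callAI("Explain async/await", settings, {
  onToken: (token) => {
    // Fires once per streamed token; the UI code above appends it to streamedText.
    console.log("token:", token);
  },
  onError: (err) => {
    // Fires if the model call fails mid-stream; the UI code above renders an inline error.
    console.error("stream error:", err);
  },
  onEnd: () => {
    // Fires when the stream completes, shortly before callAI resolves.
    console.log("stream finished");
  },
});
console.log("final reply:", reply);

The resolved value is still treated as the complete response string, which is why the send flow keeps the finalResponse normalization and the isLikelyError check after the await.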
@@ -478,44 +514,6 @@ class AIAssistant {
     return messageContent;
   }
 
-  async streamMessage(element, content, app, isError) {
-    if (!element) return;
-    const text = String(content || "");
-    element.classList.remove("ai-error");
-    element.innerHTML = "";
-    element.textContent = "";
-    if (isError) element.classList.add("ai-error");
-    const total = text.length;
-    let i = 0;
-    const large = total > 2000;
-    const chunk = large ? 8 : 1;
-    const delay = large ? 20 : 18;
-    while (i < total) {
-      const part = text.slice(i, i + chunk);
-      element.textContent = element.textContent + part;
-      i += chunk;
-      await new Promise((r) => setTimeout(r, delay));
-      const chatArea = app && app.querySelector ? app.querySelector("#ai-chat-area") : null;
-      if (chatArea) {
-        requestAnimationFrame(() => {
-          chatArea.scrollTop = chatArea.scrollHeight;
-        });
-      }
-    }
-    if (!isError && window.markdownit) {
-      element.innerHTML = this.formatAIResponse(element.textContent);
-      this.highlightCode(app);
-    } else {
-      element.innerHTML = String(element.textContent).replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/\n/g, "<br>");
-    }
-    const chatAreaFinal = app && app.querySelector ? app.querySelector("#ai-chat-area") : null;
-    if (chatAreaFinal) {
-      requestAnimationFrame(() => {
-        chatAreaFinal.scrollTop = chatAreaFinal.scrollHeight;
-      });
-    }
-  }
-
   updateMessage(element, content, app) {
     if (!element) return;
     element.innerHTML = this.formatAIResponse(content);
@@ -572,19 +570,46 @@ class AIAssistant {
     };
   }
 
-  async callAI(message, settings) {
+  _escapeHtml(str) {
+    return String(str)
+      .replace(/&/g, "&amp;")
+      .replace(/</g, "&lt;")
+      .replace(/>/g, "&gt;");
+  }
+
+  async callAI(message, settings, handlers = {}) {
     const { apiKey, baseUrl, model } = settings;
 
     if (!apiKey || !baseUrl || !model) {
       throw new Error("Please configure API settings first");
     }
 
+    const callbackManager = CallbackManager.fromHandlers({
+      async handleLLMNewToken(token) {
+        try {
+          if (handlers && typeof handlers.onToken === "function") handlers.onToken(token);
+        } catch (e) {}
+      },
+      async handleLLMError(err) {
+        try {
+          if (handlers && typeof handlers.onError === "function") handlers.onError(err);
+        } catch (e) {}
+      },
+      async handleLLMEnd(output) {
+        try {
+          if (handlers && typeof handlers.onEnd === "function") handlers.onEnd(output);
+        } catch (e) {}
+      }
+    });
+
     const chatModel = new ChatOpenAI({
       openAIApiKey: apiKey,
       modelName: model,
       configuration: {
         baseURL: baseUrl,
       },
+      streaming: true,
+      callbackManager,
     });
 
     const promptTemplate = ChatPromptTemplate.fromMessages([
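
The hunk ends at the start of the prompt construction, so the remainder of callAI (building the chain and returning the final text) is not shown in this commit view. One plausible continuation, assuming the method composes a standard LCEL pipeline from the imports already present in src/main.js (ChatPromptTemplate, ChatOpenAI, StringOutputParser) and that the prompt template exposes an input variable; this is a sketch, not the commit's actual code:

// Hypothetical remainder of callAI: prompt -> streaming chat model -> string parser.
// With streaming: true and the callbackManager bound above, handleLLMNewToken fires
// per token while invoke() still resolves with the fully assembled string.
const chain = promptTemplate.pipe(chatModel).pipe(new StringOutputParser());
const result = await chain.invoke({ input: message });
return result;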
