Skip to content

Commit

Permalink
Call is now cancelled when chat mode is switched midstream
Browse files · Browse the repository at this point in the history
  • Loading branch information
KronemeyerJoshua committed May 14, 2024
1 parent 1819d00 commit 8d1cac1
Show file tree
Hide file tree
Showing 3 changed files with 69 additions and 56 deletions.
5 changes: 3 additions & 2 deletions — app/frontend/src/api/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import { ChatResponse,
getMaxCSVFileSizeType,
} from "./models";

export async function chatApi(options: ChatRequest): Promise<Response> {
export async function chatApi(options: ChatRequest, signal: AbortSignal): Promise<Response> {
const response = await fetch("/chat", {
method: "POST",
headers: {
Expand Down Expand Up @@ -49,7 +49,8 @@ export async function chatApi(options: ChatRequest): Promise<Response> {
},
citation_lookup: options.citation_lookup,
thought_chain: options.thought_chain
})
}),
signal: signal
});

if (response.status > 299 || !response.ok) {
Expand Down
112 changes: 60 additions & 52 deletions — app/frontend/src/components/CharacterStreamer/CharacterStreamer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,66 +15,74 @@ const CharacterStreamer = ({ eventSource, nonEventString, onStreamingComplete, c
const [dots, setDots] = useState('');

const handleStream = async () => {
var response = {} as ChatResponse
if (readableStream && !readableStream.locked) {
for await (const event of readNDJSONStream(readableStream)) {
if (event["data_points"]) {
response = {
answer: "",
thoughts: event["thoughts"],
data_points: event["data_points"],
approach: approach,
thought_chain: {
"work_response": event["thought_chain"]["work_response"],
"web_response": event["thought_chain"]["web_response"]
},
work_citation_lookup: event["work_citation_lookup"],
web_citation_lookup: event["web_citation_lookup"]
}
try {
var response = {} as ChatResponse
if (readableStream && !readableStream.locked) {
for await (const event of readNDJSONStream(readableStream)) {
if (event["data_points"]) {
response = {
answer: "",
thoughts: event["thoughts"],
data_points: event["data_points"],
approach: approach,
thought_chain: {
"work_response": event["thought_chain"]["work_response"],
"web_response": event["thought_chain"]["web_response"]
},
work_citation_lookup: event["work_citation_lookup"],
web_citation_lookup: event["web_citation_lookup"]
}
}
else if (event["content"]) {
response.answer += event["content"]
queueRef.current = queueRef.current.concat(event["content"].split(''));
if (!processingRef.current) {
processQueue();
}
}
else if (event["error"]) {
if (setError) {
setError(event["error"])
return
}
else {
console.error(event["error"])
return
}
}
}
else if (event["content"]) {
response.answer += event["content"]
queueRef.current = queueRef.current.concat(event["content"].split(''));
if (!processingRef.current) {
processQueue();
}
}
else if (event["error"]) {
if (setError) {
setError(event["error"])
return
}
else {
console.error(event["error"])
return
}
if (setAnswer) {
// We need to set these values in the thought_chain so that the compare works
if (approach === Approaches.ChatWebRetrieveRead) {
response.thought_chain["web_response"] = response.answer
}
else if (approach === Approaches.ReadRetrieveRead) {
response.thought_chain["work_response"] = response.answer
}
else if (approach === Approaches.GPTDirect) {
response.thought_chain["ungrounded_response"] = response.answer
}
else if (approach === Approaches.CompareWebWithWork) {
response.thought_chain["web_to_work_comparison_response"] = response.answer
}
else if (approach === Approaches.CompareWorkWithWeb) {
response.thought_chain["work_to_web_comparison_response"] = response.answer
}

setAnswer(response)
}
}
if (setAnswer) {
// We need to set these values in the thought_chain so that the compare works
if (approach === Approaches.ChatWebRetrieveRead) {
response.thought_chain["web_response"] = response.answer
}
else if (approach === Approaches.ReadRetrieveRead) {
response.thought_chain["work_response"] = response.answer
}
else if (approach === Approaches.GPTDirect) {
response.thought_chain["ungrounded_response"] = response.answer
}
else if (approach === Approaches.CompareWebWithWork) {
response.thought_chain["web_to_work_comparison_response"] = response.answer
}
else if (approach === Approaches.CompareWorkWithWeb) {
response.thought_chain["work_to_web_comparison_response"] = response.answer
}
catch (e : any) {
if (e.name !== 'AbortError')
{
console.error(e);
}

setAnswer(response)
}
}
}

if (readableStream) {
handleStream();
handleStream();
}

useEffect(() => {
Expand Down
8 changes: 6 additions & 2 deletions — app/frontend/src/pages/chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ const Chat = () => {
const [selectedAnswer, setSelectedAnswer] = useState<number>(0);
const [answers, setAnswers] = useState<[user: string, response: ChatResponse][]>([]);
const [answerStream, setAnswerStream] = useState<ReadableStream | undefined>(undefined);
const [abortController, setAbortController] = useState<AbortController | undefined>(undefined);

async function fetchFeatureFlags() {
try {
Expand Down Expand Up @@ -133,8 +134,10 @@ const Chat = () => {
};

setAnswers([...answers, [question, temp]]);

const result = await chatApi(request);
const controller = new AbortController();
setAbortController(controller);
const signal = controller.signal;
const result = await chatApi(request, signal);
if (!result.body) {
throw Error("No response body");
}
Expand Down Expand Up @@ -237,6 +240,7 @@ const Chat = () => {
};

const onChatModeChange = (_ev: any) => {
abortController?.abort();
const chatMode = _ev.target.value as ChatMode || ChatMode.WorkOnly;
setChatMode(chatMode);
if (chatMode == ChatMode.WorkOnly)
Expand Down

0 comments on commit 8d1cac1

Please sign in to comment.