🐛 fix: fix only inject welcome question in inbox (lobehub#2289)
* ⚡️ perf: improve conversation loading performance

* 🐛 fix: fix the welcome message inject

* 🐛 fix: fix the welcome message inject
arvinxx authored Apr 29, 2024
1 parent 6d9d070 commit cc8edd3
Showing 10 changed files with 65 additions and 41 deletions.
4 changes: 2 additions & 2 deletions src/app/chat/(desktop)/features/ChatInput/Footer/SendMore.tsx
@@ -45,7 +45,7 @@ const SendMore = memo(() => {
hotKey,
(keyboardEvent, hotkeysEvent) => {
console.log(keyboardEvent, hotkeysEvent);
- sendMessage(true);
+ sendMessage({ onlyAddUserMessage: true });
},
{
enableOnFormTags: true,
@@ -94,7 +94,7 @@ const SendMore = memo(() => {
</Flexbox>
),
onClick: () => {
- sendMessage(true);
+ sendMessage({ onlyAddUserMessage: true });
},
},
],
10 changes: 8 additions & 2 deletions src/features/ChatInput/useSend.ts
@@ -1,15 +1,21 @@
import { useCallback } from 'react';

import { useChatStore } from '@/store/chat';
+ import { SendMessageParams } from '@/store/chat/slices/message/action';
import { filesSelectors, useFileStore } from '@/store/file';

+ export type UseSendMessageParams = Pick<
+ SendMessageParams,
+ 'onlyAddUserMessage' | 'isWelcomeQuestion'
+ >;

export const useSendMessage = () => {
const [sendMessage, updateInputMessage] = useChatStore((s) => [
s.sendMessage,
s.updateInputMessage,
]);

- return useCallback((onlyAddUserMessage?: boolean) => {
+ return useCallback((params: UseSendMessageParams = {}) => {
const store = useChatStore.getState();
if (!!store.chatLoadingId) return;
if (!store.inputMessage) return;
@@ -19,7 +25,7 @@ export const useSendMessage = () => {
sendMessage({
files: imageList,
message: store.inputMessage,
- onlyAddUserMessage: onlyAddUserMessage,
+ ...params,
});

updateInputMessage('');
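
With the hook now accepting an options object instead of a bare boolean, call sites name the behaviour they want, and new flags such as isWelcomeQuestion can be added without touching every caller. A minimal sketch of a caller under the new signature (the component and its UI are illustrative, not part of this commit):

import { memo } from 'react';

import { useSendMessage } from '@/features/ChatInput/useSend';

// Hypothetical button that only appends the drafted user message without asking the
// assistant to reply, mirroring what SendMore.tsx now does for its hotkey and menu item.
const AddOnlyButton = memo(() => {
  const sendMessage = useSendMessage();

  return (
    <button onClick={() => sendMessage({ onlyAddUserMessage: true })}>
      Add to chat without sending
    </button>
  );
});

export default AddOnlyButton;
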
QuestionSuggest (InboxWelcome) component (file path not shown)
@@ -10,7 +10,8 @@ import { useTranslation } from 'react-i18next';
import { Flexbox } from 'react-layout-kit';

import { USAGE_DOCUMENTS } from '@/const/url';
- import { useChatInput } from '@/features/ChatInput/useChatInput';
+ import { useSendMessage } from '@/features/ChatInput/useSend';
+ import { useChatStore } from '@/store/chat';

const useStyles = createStyles(({ css, token }) => ({
card: css`
@@ -54,14 +55,10 @@ const qa = shuffle([
]).slice(0, 5);

const QuestionSuggest = memo(() => {
- const { onInput, onSend } = useChatInput();
+ const [updateInputMessage] = useChatStore((s) => [s.updateInputMessage]);
const { t } = useTranslation('welcome');
const { styles } = useStyles();

- const handoleSend = (qa: string) => {
- onInput(qa);
- onSend();
- };
+ const sendMessage = useSendMessage();

return (
<Flexbox gap={8} width={'100%'}>
@@ -85,7 +82,10 @@
gap={8}
horizontal
key={item}
- onClick={() => handoleSend(text)}
+ onClick={() => {
+ updateInputMessage(text);
+ sendMessage({ isWelcomeQuestion: true });
+ }}
>
{t(text)}
</Flexbox>
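
Note the ordering in the new onClick handler: useSendMessage takes no message text, it reads whatever currently sits in the store's inputMessage, so the suggested question must be written into the draft before the send call. A condensed, hypothetical suggestion chip showing that flow (styling and i18n stripped; only the hook usage follows the diff):

import { memo } from 'react';

import { useSendMessage } from '@/features/ChatInput/useSend';
import { useChatStore } from '@/store/chat';

const SuggestionChip = memo<{ text: string }>(({ text }) => {
  const updateInputMessage = useChatStore((s) => s.updateInputMessage);
  const sendMessage = useSendMessage();

  return (
    <button
      onClick={() => {
        // 1) put the suggestion into the shared draft, 2) send it, tagged as a welcome
        // question so the service can decide whether to inject the inbox guide role.
        updateInputMessage(text);
        sendMessage({ isWelcomeQuestion: true });
      }}
    >
      {text}
    </button>
  );
});

export default SuggestionChip;
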
VirtualizedList component (Conversation, file path not shown)
@@ -7,9 +7,11 @@ import { useChatStore } from '@/store/chat';
import { chatSelectors } from '@/store/chat/selectors';
import { isMobileScreen } from '@/utils/screen';

+ import { useInitConversation } from '../../hooks/useInitConversation';
import AutoScroll from '../AutoScroll';
import Item from '../ChatItem';
import InboxWelcome from '../InboxWelcome';
+ import SkeletonList from '../SkeletonList';

const WELCOME_ID = 'welcome';

@@ -29,6 +31,8 @@ interface VirtualizedListProps {
mobile?: boolean;
}
const VirtualizedList = memo<VirtualizedListProps>(({ mobile }) => {
+ useInitConversation();

const virtuosoRef = useRef<VirtuosoHandle>(null);
const [atBottom, setAtBottom] = useState(true);

@@ -52,7 +56,9 @@ const VirtualizedList = memo<VirtualizedListProps>(({ mobile }) => {
// overscan should be 1.5 times the height of the window
const overscan = typeof window !== 'undefined' ? window.innerHeight * 1.5 : 0;

- return chatLoading && data.length === 2 ? null : (
+ return chatLoading ? (
+ <SkeletonList mobile={mobile} />
+ ) : (
<Flexbox height={'100%'}>
<Virtuoso
atBottomStateChange={setAtBottom}
15 changes: 6 additions & 9 deletions src/features/Conversation/index.tsx
@@ -1,13 +1,12 @@
import { createStyles } from 'antd-style';
- import { ReactNode, memo } from 'react';
+ import { ReactNode, Suspense, lazy, memo } from 'react';
import { Flexbox } from 'react-layout-kit';

import ChatHydration from '@/components/StoreHydration/ChatHydration';
- import { useChatStore } from '@/store/chat';

import SkeletonList from './components/SkeletonList';
- import ChatList from './components/VirtualizedList';
- import { useInitConversation } from './hooks/useInitConversation';

+ const ChatList = lazy(() => import('./components/VirtualizedList'));

const useStyles = createStyles(
({ css, responsive, stylish }) => css`
@@ -30,18 +29,16 @@ interface ConversationProps {
const Conversation = memo<ConversationProps>(({ chatInput, mobile }) => {
const { styles } = useStyles();

- useInitConversation();

- const [messagesInit] = useChatStore((s) => [s.messagesInit]);

return (
<Flexbox
flex={1}
// `relative` is required, ChatInput's absolute position needs it
style={{ position: 'relative' }}
>
<div className={styles}>
- {messagesInit ? <ChatList mobile={mobile} /> : <SkeletonList mobile={mobile} />}
+ <Suspense fallback={<SkeletonList mobile={mobile} />}>
+ <ChatList mobile={mobile} />
+ </Suspense>
</div>
{chatInput}
<ChatHydration />
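
Read together with the suspense: true and fallbackData options added to the messages SWR hook further down in action.ts, the loading path appears to become: ChatList is code-split via lazy(), the conversation-init hook inside it fetches messages in SWR suspense mode, and the single Suspense boundary shows SkeletonList until both the chunk and the first messages have arrived, which is what lets the manual messagesInit check go away. A reduced, self-contained sketch of that pattern (stand-in data and components, not the repo's real modules):

import { Suspense, lazy, memo } from 'react';
import useSWR from 'swr';

// Stand-in fetcher; in the app this role is played by the message service.
const fetchMessages = async (sessionId: string) => [{ content: 'hello from ' + sessionId, id: '1' }];

const MessageList = memo<{ sessionId: string }>(({ sessionId }) => {
  // suspense: true makes this component suspend until the first page of messages
  // resolves, so the parent <Suspense> keeps rendering the fallback meanwhile.
  const { data } = useSWR(['messages', sessionId], () => fetchMessages(sessionId), {
    suspense: true,
  });

  return (
    <ul>
      {data?.map((m) => <li key={m.id}>{m.content}</li>)}
    </ul>
  );
});

// In the real code this is lazy(() => import('./components/VirtualizedList')).
const LazyList = lazy(async () => ({ default: MessageList }));

export const Conversation = ({ sessionId }: { sessionId: string }) => (
  <Suspense fallback={<div>loading skeleton</div>}>
    <LazyList sessionId={sessionId} />
  </Suspense>
);
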
10 changes: 8 additions & 2 deletions src/services/chat.ts
@@ -31,6 +31,7 @@ import { createHeaderWithAuth, getProviderAuthPayload } from './_auth';
import { API_ENDPOINTS } from './_url';

interface FetchOptions {
+ isWelcomeQuestion?: boolean;
signal?: AbortSignal | undefined;
trace?: TracePayload;
}
@@ -65,6 +66,7 @@ interface FetchAITaskResultParams {

interface CreateAssistantMessageStream extends FetchSSEOptions {
abortController?: AbortController;
+ isWelcomeQuestion?: boolean;
params: GetChatCompletionPayload;
trace?: TracePayload;
}
@@ -220,10 +222,12 @@ class ChatService {
onErrorHandle,
onFinish,
trace,
+ isWelcomeQuestion,
}: CreateAssistantMessageStream) => {
await fetchSSE(
() =>
this.createAssistantMessage(params, {
+ isWelcomeQuestion,
signal: abortController?.signal,
trace: this.mapTrace(trace, TraceTagMap.Chat),
}),
@@ -432,9 +436,11 @@ class ChatService {
});

return produce(postMessages, (draft) => {
- // Inject InboxGuide SystemRole
+ // if it's a welcome question, inject InboxGuide SystemRole
const inboxGuideSystemRole =
- options?.trace?.sessionId === INBOX_SESSION_ID && INBOX_GUIDE_SYSTEMROLE;
+ options?.isWelcomeQuestion &&
+ options?.trace?.sessionId === INBOX_SESSION_ID &&
+ INBOX_GUIDE_SYSTEMROLE;

// Inject Tool SystemRole
const hasTools = tools && tools?.length > 0;
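
The behavioural core of the fix is the extra condition where ChatService prepares the outgoing messages: the inbox guide system role is now injected only when the request is flagged as a welcome question and the session is the inbox, instead of for every inbox request. A minimal standalone sketch of that gate (the two constants stand in for the repo's INBOX_SESSION_ID and INBOX_GUIDE_SYSTEMROLE imports; the helper name is illustrative):

const INBOX_SESSION_ID = 'inbox';
const INBOX_GUIDE_SYSTEMROLE = 'stand-in for the inbox guide system prompt';

interface GuideGateOptions {
  isWelcomeQuestion?: boolean;
  trace?: { sessionId?: string };
}

// Before this commit only the session check existed, so the guide role leaked into
// every inbox conversation; now the welcome-question flag must also be set.
const resolveInboxGuideSystemRole = (options?: GuideGateOptions): string | false =>
  Boolean(options?.isWelcomeQuestion) &&
  options?.trace?.sessionId === INBOX_SESSION_ID &&
  INBOX_GUIDE_SYSTEMROLE;

// e.g. resolveInboxGuideSystemRole({ isWelcomeQuestion: true, trace: { sessionId: 'inbox' } })
// returns the prompt, while a plain inbox message without the flag returns false.
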
6 changes: 1 addition & 5 deletions src/store/chat/slices/message/action.test.ts
@@ -385,11 +385,7 @@ describe('chatMessage actions', () => {
});

expect(messageService.removeMessage).not.toHaveBeenCalledWith(messageId);
- expect(mockState.coreProcessMessage).toHaveBeenCalledWith(
- expect.any(Array),
- messageId,
- undefined,
- );
+ expect(mockState.coreProcessMessage).toHaveBeenCalledWith(expect.any(Array), messageId, {});
});

it('should not perform any action if the message id does not exist', async () => {
33 changes: 23 additions & 10 deletions src/store/chat/slices/message/action.ts
@@ -29,10 +29,20 @@ const n = setNamespace('message');

const SWR_USE_FETCH_MESSAGES = 'SWR_USE_FETCH_MESSAGES';

- interface SendMessageParams {
+ export interface SendMessageParams {
message: string;
files?: { id: string; url: string }[];
onlyAddUserMessage?: boolean;
+ /**
+ *
+ * https://github.com/lobehub/lobe-chat/pull/2086
+ */
+ isWelcomeQuestion?: boolean;
}

+ interface ProcessMessageParams {
+ traceId?: string;
+ isWelcomeQuestion?: boolean;
+ }

export interface ChatMessageAction {
@@ -77,7 +87,7 @@ export interface ChatMessageAction {
coreProcessMessage: (
messages: ChatMessage[],
parentId: string,
- traceId?: string,
+ params?: ProcessMessageParams,
) => Promise<void>;
/**
* actually fetch the AI response
@@ -87,7 +97,7 @@
fetchAIChatMessage: (
messages: ChatMessage[],
assistantMessageId: string,
- traceId?: string,
+ params?: ProcessMessageParams,
) => Promise<{
content: string;
functionCallAtEnd: boolean;
@@ -173,7 +183,7 @@ export const chatMessage: StateCreator<
await messageService.removeAllMessages();
await refreshMessages();
},
- sendMessage: async ({ message, files, onlyAddUserMessage }) => {
+ sendMessage: async ({ message, files, onlyAddUserMessage, isWelcomeQuestion }) => {
const { coreProcessMessage, activeTopicId, activeId } = get();
if (!activeId) return;

@@ -200,7 +210,7 @@
// Get the current messages to generate AI response
const messages = chatSelectors.currentChats(get());

- await coreProcessMessage(messages, id);
+ await coreProcessMessage(messages, id, { isWelcomeQuestion });

// check activeTopic and then auto create topic
const chats = chatSelectors.currentChats(get());
@@ -263,6 +273,8 @@
async ([, sessionId, topicId]: [string, string, string | undefined]) =>
messageService.getMessages(sessionId, topicId),
{
+ suspense: true,
+ fallbackData: [],
onSuccess: (messages, key) => {
set(
{ activeId: sessionId, messages, messagesInit: true },
@@ -280,7 +292,7 @@
},

// the internal process method of the AI message
- coreProcessMessage: async (messages, userMessageId, trace) => {
+ coreProcessMessage: async (messages, userMessageId, params) => {
const { fetchAIChatMessage, triggerFunctionCall, refreshMessages, activeTopicId } = get();

const { model, provider } = getAgentConfig();
@@ -301,7 +313,7 @@

// 2. fetch the AI response
const { isFunctionCall, content, functionCallAtEnd, functionCallContent, traceId } =
- await fetchAIChatMessage(messages, mid, trace);
+ await fetchAIChatMessage(messages, mid, params);

// 3. if it's the function call message, trigger the function method
if (isFunctionCall) {
@@ -341,7 +353,7 @@

set({ messages }, false, n(`dispatchMessage/${payload.type}`, payload));
},
- fetchAIChatMessage: async (messages, assistantId, traceId) => {
+ fetchAIChatMessage: async (messages, assistantId, params) => {
const {
toggleChatLoading,
refreshMessages,
@@ -421,11 +433,12 @@
plugins: config.plugins,
},
trace: {
- traceId,
+ traceId: params?.traceId,
sessionId: get().activeId,
topicId: get().activeTopicId,
traceName: TraceNameMap.Conversation,
},
+ isWelcomeQuestion: params?.isWelcomeQuestion,
onErrorHandle: async (error) => {
await messageService.updateMessageError(assistantId, error);
await refreshMessages();
@@ -567,7 +580,7 @@

if (!latestMsg) return;

- await coreProcessMessage(contextMessages, latestMsg.id, traceId);
+ await coreProcessMessage(contextMessages, latestMsg.id, { traceId });
},

internalUpdateMessageContent: async (id, content) => {
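
Across the slice, the former positional traceId argument becomes a small params object, so tracing and the welcome-question flag travel through the same pipeline without growing the signatures again; it is also why the tests now assert a call with {} rather than undefined, since every caller builds an object even when the fields are unset. A stub-level sketch of the reshaped call chain (types follow the diff, bodies are reduced to the hand-off):

interface ProcessMessageParams {
  traceId?: string;
  isWelcomeQuestion?: boolean;
}

type ChatMessage = { content: string; id: string };

const fetchAIChatMessage = async (
  messages: ChatMessage[],
  assistantId: string,
  params?: ProcessMessageParams,
) => {
  // In the store this is where the chat service is called: traceId feeds the trace
  // payload and isWelcomeQuestion is forwarded as its own option.
  console.log(assistantId, messages.length, params?.traceId, params?.isWelcomeQuestion);
};

const coreProcessMessage = async (
  messages: ChatMessage[],
  parentId: string,
  params?: ProcessMessageParams,
) => {
  await fetchAIChatMessage(messages, `assistant-for-${parentId}`, params);
};

// Entry point 1: a fresh user message may carry the welcome flag.
const sendMessage = (messages: ChatMessage[], userMessageId: string, isWelcomeQuestion?: boolean) =>
  coreProcessMessage(messages, userMessageId, { isWelcomeQuestion });

// Entry point 2: resend and plugin-triggered messages carry a trace id instead.
const triggerAIMessage = (messages: ChatMessage[], id: string, traceId?: string) =>
  coreProcessMessage(messages, id, { traceId });
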
2 changes: 1 addition & 1 deletion src/store/chat/slices/plugin/action.test.ts
@@ -61,7 +61,7 @@ describe('ChatPluginAction', () => {
expect(result.current.coreProcessMessage).toHaveBeenCalledWith(
mockCurrentChats,
messageId,
- undefined,
+ {},
);
});
it('should update message content and not trigger ai message', async () => {
2 changes: 1 addition & 1 deletion src/store/chat/slices/plugin/action.ts
@@ -174,7 +174,7 @@ export const chatPlugin: StateCreator<
triggerAIMessage: async (id, traceId) => {
const { coreProcessMessage } = get();
const chats = chatSelectors.currentChats(get());
- await coreProcessMessage(chats, id, traceId);
+ await coreProcessMessage(chats, id, { traceId });
},

triggerFunctionCall: async (id) => {
