Skip to content

Commit ffe7b0f

Browse files
Improvements (#35)
1 parent 7d09a08 commit ffe7b0f

File tree

4 files changed

+51
-6
lines changed

4 files changed

+51
-6
lines changed

src/pages/sidePanel/PageSummary.tsx

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,10 @@ export default function PageSummary({ loading, summary, taskType }) {
1414
</div>
1515
) : summary ? (
1616
<div>
17-
<div className="content-box">{summary.text}</div>
17+
<div className="content-box">
18+
<h2 className="summary-title">{summary.title}</h2>
19+
<div className="summary-body">{summary.text}</div>
20+
</div>
1821
<div className="form-container">
1922
<PageMetadata metadata={summary} taskType={taskType} />
2023
</div>

src/pages/sidePanel/QandA.ts

Lines changed: 34 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ export type ConversationalRetrievalQAChainInput = {
2727
async function setupVectorstore(selectedModel) {
2828
console.log('Setting up vectorstore', selectedModel);
2929
const embeddings = new HuggingFaceTransformersEmbeddings({
30-
modelName: 'Supabase/gte-small',
30+
modelName: 'Xenova/jina-embeddings-v2-small-en',
3131
});
3232
const voyClient = new VoyClient();
3333
return new VoyVectorStore(voyClient, embeddings);
@@ -49,6 +49,16 @@ export async function embedDocs(selectedModel, localFile): Promise<EmbedDocsOutp
4949
documents.push(
5050
new Document({
5151
pageContent: pageContent.textContent,
52+
metadata: {
53+
pageURL: pageContent.pageURL,
54+
title: pageContent.title,
55+
length: pageContent.length,
56+
excerpt: pageContent.excerpt,
57+
byline: pageContent.byline,
58+
dir: pageContent.dir,
59+
siteName: pageContent.siteName,
60+
lang: pageContent.lang,
61+
},
5262
}),
5363
);
5464
} else {
@@ -77,8 +87,6 @@ export async function* talkToDocument(selectedModel, vectorStore, input: Convers
7787
console.log('chat_history', input.chat_history);
7888
console.log('vectorStore', vectorStore);
7989
const retriever = vectorStore.asRetriever();
80-
const context = retriever.pipe(formatDocumentsAsString);
81-
console.log('context', context);
8290
const condenseQuestionTemplate = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
8391
8492
Chat History:
@@ -91,8 +99,15 @@ export async function* talkToDocument(selectedModel, vectorStore, input: Convers
9199
Do not use any other sources of information.
92100
Do not provide any answer that is not based on the context.
93101
If there is no answer, type "Not sure based on the context".
102+
Additionally you will be given metadata like
103+
title,content,length,excerpt,byline,dir,siteName,lang
104+
in the metadata field. Use this information to help you answer the question.
105+
94106
{context}
95107
108+
Metadata:
109+
{metadata}
110+
96111
Question: {question}
97112
Answer:
98113
`);
@@ -109,6 +124,7 @@ export async function* talkToDocument(selectedModel, vectorStore, input: Convers
109124
{
110125
context: retriever.pipe(formatDocumentsAsString),
111126
question: new RunnablePassthrough(),
127+
metadata: retriever.pipe(documents => getMetadataString(documents[0].metadata)),
112128
},
113129
prompt,
114130
llm,
@@ -122,6 +138,20 @@ export async function* talkToDocument(selectedModel, vectorStore, input: Convers
122138
}
123139
}
124140

141+
function getMetadataString(metadata) {
142+
const result = [];
143+
144+
for (const key in metadata) {
145+
// Check if the property is not an object and not an array
146+
if (Object.prototype.hasOwnProperty.call(metadata, key) && typeof metadata[key] !== 'object') {
147+
result.push(`${key}: ${metadata[key]}`);
148+
}
149+
}
150+
console.log('result', result);
151+
152+
return result.join(' ');
153+
}
154+
125155
export const formatChatHistory = (chatHistory: { question: string; answer: string }[]) => {
126156
console.log('chatHistory', chatHistory);
127157
const formattedDialogueTurns = chatHistory.map(
@@ -174,7 +204,7 @@ export async function* chatWithLLM(selectedModel, input: ConversationalRetrieval
174204
const llm = new ChatOllama({
175205
baseUrl: OLLAMA_BASE_URL,
176206
model: selectedModel,
177-
temperature: 0,
207+
temperature: 0.3,
178208
});
179209
const chatPrompt = ChatPromptTemplate.fromMessages([
180210
[
@@ -207,7 +237,6 @@ export async function* chatWithLLM(selectedModel, input: ConversationalRetrieval
207237
});
208238

209239
for await (const chunk of stream) {
210-
console.log('chunk', chunk);
211240
yield chunk.response;
212241
}
213242
}

src/pages/sidePanel/SidePanel.css

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -544,3 +544,14 @@
544544
.spin {
545545
animation: spin 2s linear infinite;
546546
}
547+
548+
.summary-title {
549+
color: #61dafb;
550+
/* Or any color that fits your design */
551+
margin: 20px 0;
552+
/* Adjust margin as needed */
553+
font-size: calc(10px + 2vmin);
554+
/* Adjust font size as needed */
555+
text-align: center;
556+
/* If you want to center the title */
557+
}

src/pages/sidePanel/Summarize.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
55
import { OLLAMA_BASE_URL } from '@src/pages/sidePanel/QandA';
66

77
export type SummarizationResponse = {
8+
title?: string;
89
text: string;
910
pageURL: string;
1011
tabID?: number;
@@ -33,6 +34,7 @@ async function summarizeCurrentPage(selectedModel) {
3334
input_documents: docs,
3435
});
3536
return {
37+
title: pageContent.title,
3638
text: response.text,
3739
pageURL: pageContent.pageURL,
3840
tabID: pageContent.tabID,

0 commit comments

Comments (0)