Skip to content

LLM format change #459

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 10 commits into from
Jun 21, 2024
13 changes: 7 additions & 6 deletions backend/score.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,12 +191,13 @@ async def extract_knowledge_graph_from_file(
error_message = str(e)
graphDb_data_Access.update_exception_db(file_name,error_message)
gcs_file_cache = os.environ.get('GCS_FILE_CACHE')
if source_type == 'local file' and gcs_file_cache == 'True':
folder_name = create_gcs_bucket_folder_name_hashed(uri,file_name)
delete_file_from_gcs(BUCKET_UPLOAD,folder_name,file_name)
else:
logging.info(f'Deleted File Path: {merged_file_path} and Deleted File Name : {file_name}')
delete_uploaded_local_file(merged_file_path,file_name)
if source_type == 'local file':
if gcs_file_cache == 'True':
folder_name = create_gcs_bucket_folder_name_hashed(uri,file_name)
delete_file_from_gcs(BUCKET_UPLOAD,folder_name,file_name)
else:
logging.info(f'Deleted File Path: {merged_file_path} and Deleted File Name : {file_name}')
delete_uploaded_local_file(merged_file_path,file_name)
josn_obj = {'message':message,'error_message':error_message, 'file_name': file_name,'status':'Failed','db_url':uri,'failed_count':1, 'source_type': source_type}
logger.log_struct(josn_obj)
logging.exception(f'File Failed in extraction: {josn_obj}')
Expand Down
13 changes: 6 additions & 7 deletions backend/src/QA_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,14 +78,13 @@ def get_llm(model: str,max_tokens=1000) -> Any:
"""Retrieve the specified language model based on the model name."""

model_versions = {
"OpenAI GPT 3.5": "gpt-3.5-turbo-16k",
"Gemini Pro": "gemini-1.0-pro-001",
"Gemini 1.5 Pro": "gemini-1.5-pro-preview-0409",
"OpenAI GPT 4": "gpt-4-0125-preview",
"Diffbot" : "gpt-4-0125-preview",
"OpenAI GPT 4o":"gpt-4o"
"gpt-3.5": "gpt-3.5-turbo-16k",
"gemini-1.0-pro": "gemini-1.0-pro-001",
"gemini-1.5-pro": "gemini-1.5-pro-preview-0409",
"gpt-4": "gpt-4-0125-preview",
"diffbot" : "gpt-4-0125-preview",
"gpt-4o":"gpt-4o"
}

if model in model_versions:
model_version = model_versions[model]
logging.info(f"Chat Model: {model}, Model Version: {model_version}")
Expand Down
20 changes: 10 additions & 10 deletions backend/src/shared/constants.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
MODEL_VERSIONS = {
"OpenAI GPT 3.5": "gpt-3.5-turbo-16k",
"Gemini 1.0 Pro": "gemini-1.0-pro-001",
"Gemini 1.5 Pro": "gemini-1.5-pro-preview-0514",
"OpenAI GPT 4": "gpt-4-0125-preview",
"Diffbot" : "gpt-4o",
"OpenAI GPT 4o":"gpt-4o",
"Groq llama3" : "llama3-70b-8192"
"gpt-3.5": "gpt-3.5-turbo-16k",
"gemini-1.0-pro": "gemini-1.0-pro-001",
"gemini-1.5-pro": "gemini-1.5-pro-preview-0514",
"gpt-4": "gpt-4-0125-preview",
"diffbot" : "gpt-4o",
"gpt-4o":"gpt-4o",
"groq-llama3" : "llama3-70b-8192"
}
OPENAI_MODELS = ["OpenAI GPT 3.5", "OpenAI GPT 4o"]
GEMINI_MODELS = ["Gemini 1.0 Pro", "Gemini 1.5 Pro"]
OPENAI_MODELS = ["gpt-3.5", "gpt-4o"]
GEMINI_MODELS = ["gemini-1.0-pro", "gemini-1.5-pro"]
CHAT_MAX_TOKENS = 1000
CHAT_SEARCH_KWARG_K = 3
CHAT_SEARCH_KWARG_SCORE_THRESHOLD = 0.7
GROQ_MODELS = ["Groq llama3"]
GROQ_MODELS = ["groq-llama3"]
BUCKET_UPLOAD = 'llm-graph-builder-upload'
PROJECT_ID = 'llm-experiments-387609'
51 changes: 32 additions & 19 deletions frontend/src/components/DropZone.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,9 @@ const DropZone: FunctionComponent = () => {
});

if (apiResponse?.data.status === 'Failed') {
throw new Error(`message:${apiResponse.data.message},fileName:${apiResponse.data.file_name}`);
throw new Error(
JSON.stringify({ message: apiResponse.data.message, fileName: apiResponse.data.file_name })
);
} else {
if (apiResponse.data.data) {
setFilesData((prevfiles) =>
Expand Down Expand Up @@ -166,24 +168,35 @@ const DropZone: FunctionComponent = () => {
uploadNextChunk();
}
} catch (error) {
setIsLoading(false);
setalertDetails({
showAlert: true,
alertType: 'error',
alertMessage: 'Error Occurred',
});
setFilesData((prevfiles) =>
prevfiles.map((curfile) => {
if (curfile.name == file.name) {
return {
...curfile,
status: 'Failed',
type: `${file.name.substring(file.name.lastIndexOf('.') + 1, file.name.length).toUpperCase()}`,
};
}
return curfile;
})
);
if (error instanceof Error) {
setIsLoading(false);
if (error.name === 'AxiosError') {
setalertDetails({
showAlert: true,
alertType: 'error',
alertMessage: error.message,
});
} else {
const parsedError = JSON.parse(error.message);
setalertDetails({
showAlert: true,
alertType: 'error',
alertMessage: parsedError.message,
});
}
setFilesData((prevfiles) =>
prevfiles.map((curfile) => {
if (curfile.name == file.name) {
return {
...curfile,
status: 'Failed',
type: `${file.name.substring(file.name.lastIndexOf('.') + 1, file.name.length).toUpperCase()}`,
};
}
return curfile;
})
);
}
}
} else {
setFilesData((prevfiles) =>
Expand Down
9 changes: 7 additions & 2 deletions frontend/src/components/Dropdown.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ import { Dropdown } from '@neo4j-ndl/react';
import { DropdownProps, OptionType } from '../types';
import { useMemo } from 'react';
import { defaultLLM, llms } from '../utils/Constants';
import { capitalize } from '../utils/Utils';


const LlmDropdown: React.FC<DropdownProps> = ({ onSelect, isDisabled }) => {
const handleChange = (selectedOption: OptionType | null | void) => {
Expand All @@ -17,9 +19,12 @@ const LlmDropdown: React.FC<DropdownProps> = ({ onSelect, isDisabled }) => {
aria-label='A selection dropdown'
selectProps={{
onChange: handleChange,
options: allOptions?.map((option) => ({ label: option, value: option })),
options: allOptions?.map((option) => ({
label: capitalize(option),
value: option,
})),
placeholder: 'Select LLM Model',
defaultValue: { label: defaultLLM, value: defaultLLM },
defaultValue: { label: capitalize(defaultLLM), value: defaultLLM },
menuPlacement: 'auto',
isDisabled,
}}
Expand Down
28 changes: 14 additions & 14 deletions frontend/src/components/FileTable.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ import {
CellContext,
Table,
Row,
getSortedRowModel
getSortedRowModel,
} from '@tanstack/react-table';
import { useFileContext } from '../context/UsersFiles';
import { getSourceNodes } from '../services/GetFiles';
import { v4 as uuidv4 } from 'uuid';
import { statusCheck } from '../utils/Utils';
import { statusCheck, capitalize } from '../utils/Utils';
import { SourceNode, CustomFile, FileTableProps, UserCredentials, statusupdate, alertStateType } from '../types';
import { useCredentials } from '../context/UserCredentials';
import { MagnifyingGlassCircleIconSolid } from '@neo4j-ndl/react/icons';
Expand Down Expand Up @@ -273,7 +273,7 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
}),
columnHelper.accessor((row) => row.model, {
id: 'model',
cell: (info) => <i>{info.getValue()}</i>,
cell: (info) => <i>{capitalize(info.getValue())}</i>,
header: () => <span>Model</span>,
footer: (info) => info.column.id,
}),
Expand Down Expand Up @@ -345,14 +345,14 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
item?.fileSource === 's3 bucket' && localStorage.getItem('accesskey') === item?.awsAccessKeyId
? item?.status
: item?.fileSource === 'local file'
? item?.status
: item?.status === 'Completed' || item.status === 'Failed'
? item?.status
: item?.fileSource == 'Wikipedia' ||
item?.fileSource == 'youtube' ||
item?.fileSource == 'gcs bucket'
? item?.status
: 'N/A',
? item?.status
: item?.status === 'Completed' || item.status === 'Failed'
? item?.status
: item?.fileSource == 'Wikipedia' ||
item?.fileSource == 'youtube' ||
item?.fileSource == 'gcs bucket'
? item?.status
: 'N/A',
model: item?.model ?? model,
id: uuidv4(),
source_url: item?.url != 'None' && item?.url != '' ? item.url : '',
Expand All @@ -365,8 +365,8 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
language: item?.language ?? '',
processingProgress:
item?.processed_chunk != undefined &&
item?.total_chunks != undefined &&
!isNaN(Math.floor((item?.processed_chunk / item?.total_chunks) * 100))
item?.total_chunks != undefined &&
!isNaN(Math.floor((item?.processed_chunk / item?.total_chunks) * 100))
? Math.floor((item?.processed_chunk / item?.total_chunks) * 100)
: undefined,
total_pages: item?.total_pages ?? 0,
Expand Down Expand Up @@ -581,7 +581,7 @@ const FileTable: React.FC<FileTableProps> = ({ isExpanded, connectionStatus, set
enableMultiRowSelection: true,
getRowId: (row) => JSON.stringify({ ...row }),
enableSorting: true,
getSortedRowModel: getSortedRowModel()
getSortedRowModel: getSortedRowModel(),
});

useEffect(() => {
Expand Down
8 changes: 4 additions & 4 deletions frontend/src/components/GCSModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,10 @@ const GCSModal: React.FC<GCSModalProps> = ({ hideModal, open, openGCSModal }) =>
} else if (apiResponse?.data?.success_count) {
showAlert('info', `Successfully Created Source Nodes for ${apiResponse.data.success_count} Files`);
} else if (apiResponse.data.failed_count) {
showAlert('error', `Failed to Created Source Node for ${apiResponse.data.failed_count} Files`);
} else {
showAlert('error', `Invalid Folder Name`);
}
showAlert('error', `Failed to Created Source Node for ${apiResponse.data.failed_count} Files`);
} else {
showAlert('error', `Invalid Folder Name`);
}
const copiedFilesData = [...filesData];
apiResponse?.data?.file_name?.forEach((item: fileName) => {
const filedataIndex = copiedFilesData.findIndex((filedataitem) => filedataitem?.name === item.fileName);
Expand Down
12 changes: 6 additions & 6 deletions frontend/src/utils/Constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,13 @@ export const APP_SOURCES =
export const llms =
process.env?.LLM_MODELS?.trim() != ''
? process.env.LLM_MODELS?.split(',')
: ['Diffbot', 'Gemini 1.0 Pro', 'OpenAI GPT 3.5', 'OpenAI GPT 4o', 'Gemini 1.5 Pro', 'Groq llama3'];
: ['diffbot', 'gpt-3.5', 'gpt-4o', 'gemini-1.0-pro', 'gemini-1.5-pro', 'groq-llama3'];

export const defaultLLM = llms?.includes('OpenAI GPT 3.5')
? 'OpenAI GPT 3.5'
: llms?.includes('Gemini 1.0 Pro')
? 'Gemini 1.0 Pro'
: 'Diffbot';
// Default chat model: prefer 'gpt-3.5', then 'gemini-1.0-pro',
// falling back to 'diffbot' when neither is in the configured list.
export const defaultLLM =
  ['gpt-3.5', 'gemini-1.0-pro'].find((model) => llms?.includes(model)) ?? 'diffbot';

export const chunkSize = process.env.CHUNK_SIZE ? parseInt(process.env.CHUNK_SIZE) : 1 * 1024 * 1024;
export const timeperpage = process.env.TIME_PER_PAGE ? parseInt(process.env.TIME_PER_PAGE) : 50;
Expand Down
4 changes: 4 additions & 0 deletions frontend/src/utils/Utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -166,3 +166,7 @@ export const calculateProcessingTime = (fileSizeBytes: number, processingTimePer
const seconds = Math.floor(totalProcessingTimeSeconds % 60);
return { minutes, seconds };
};

/**
 * Returns `word` with its first character upper-cased and the rest unchanged.
 *
 * @param word - the string to capitalize (e.g. a model name shown in the UI)
 * @returns the capitalized string; the empty string is returned as-is
 *          (the original implementation threw a TypeError on '' because
 *          `''[0]` is undefined).
 */
export const capitalize = (word: string): string => {
  if (word.length === 0) {
    return word;
  }
  return `${word[0].toUpperCase()}${word.slice(1)}`;
};