Fix typo: correct 'josn_obj' to 'json_obj' #697

Merged: 11 commits merged on Aug 20, 2024
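The rename is mechanical but touches every structured-logging call in backend/score.py. Below is a minimal sketch of the corrected pattern; the formatted_time helper and logger object are hypothetical stand-ins added only to make the sketch self-contained, while the json_obj / log_struct naming mirrors the diff.

from datetime import datetime, timezone

# Hypothetical stand-ins for the project's helpers, included only so the
# sketch runs on its own; the real formatted_time and logger come from
# backend/score.py's imports.
def formatted_time(dt):
    return dt.strftime('%Y-%m-%d %H:%M:%S %Z')

class _StructLogger:
    def log_struct(self, payload):
        print(payload)

logger = _StructLogger()

# The fix: build the structured payload as json_obj (not josn_obj) and pass
# the same, correctly spelled name to log_struct.
json_obj = {
    'api_name': 'url_scan',
    'db_url': 'neo4j+s://example',  # placeholder value
    'logging_time': formatted_time(datetime.now(timezone.utc)),
}
logger.log_struct(json_obj)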
69 changes: 34 additions & 35 deletions backend/score.py
@@ -105,8 +105,8 @@ async def create_source_knowledge_graph_url(
return create_api_response('Failed',message='source_type is other than accepted source')

message = f"Source Node created successfully for source type: {source_type} and source: {source}"
josn_obj = {'api_name':'url_scan','db_url':uri,'url_scanned_file':lst_file_name, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'url_scan','db_url':uri,'url_scanned_file':lst_file_name, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response("Success",message=message,success_count=success_count,failed_count=failed_count,file_name=lst_file_name)
except Exception as e:
error_message = str(e)
@@ -208,9 +208,9 @@ async def extract_knowledge_graph_from_file(
else:
logging.info(f'Deleted File Path: {merged_file_path} and Deleted File Name : {file_name}')
delete_uploaded_local_file(merged_file_path,file_name)
josn_obj = {'message':message,'error_message':error_message, 'file_name': file_name,'status':'Failed','db_url':uri,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
logging.exception(f'File Failed in extraction: {josn_obj}')
json_obj = {'message':message,'error_message':error_message, 'file_name': file_name,'status':'Failed','db_url':uri,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
logging.exception(f'File Failed in extraction: {json_obj}')
return create_api_response('Failed', message=message + error_message[:100], error=error_message, file_name = file_name)
finally:
gc.collect()
@@ -225,8 +225,8 @@ async def get_source_list(uri:str, userName:str, password:str, database:str=None
if " " in uri:
uri = uri.replace(" ","+")
result = await asyncio.to_thread(get_source_list_from_graph,uri,userName,decoded_password,database)
josn_obj = {'api_name':'sources_list','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'sources_list','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response("Success",data=result)
except Exception as e:
job_status = "Failed"
@@ -243,19 +243,18 @@ async def post_processing(uri=Form(), userName=Form(), password=Form(), database

if "materialize_text_chunk_similarities" in tasks:
await asyncio.to_thread(update_graph, graph)
josn_obj = {'api_name': 'post_processing/materialize_text_chunk_similarities', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name': 'post_processing/update_similarity_graph', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
logging.info(f'Updated KNN Graph')
if "enable_hybrid_search_and_fulltext_search_in_bloom" in tasks:
await asyncio.to_thread(create_fulltext, uri=uri, username=userName, password=password, database=database,type="entities")
await asyncio.to_thread(create_fulltext, uri=uri, username=userName, password=password, database=database,type="keyword")
josn_obj = {'api_name': 'post_processing/enable_hybrid_search_and_fulltext_search_in_bloom', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
if "create_fulltext_index" in tasks:
await asyncio.to_thread(create_fulltext, uri=uri, username=userName, password=password, database=database)
json_obj = {'api_name': 'post_processing/create_fulltext_index', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
logging.info(f'Full Text index created')
if os.environ.get('ENTITY_EMBEDDING','False').upper()=="TRUE" and "materialize_entity_similarities" in tasks:
await asyncio.to_thread(create_entity_embedding, graph)
josn_obj = {'api_name': 'post_processing/materialize_entity_similarities', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name': 'post_processing/create_entity_embedding', 'db_url': uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
logging.info(f'Entity Embeddings created')
return create_api_response('Success', message='All tasks completed successfully')

@@ -284,8 +283,8 @@ async def chat_bot(uri=Form(),model=Form(None),userName=Form(), password=Form(),
logging.info(f"Total Response time is {total_call_time:.2f} seconds")
result["info"]["response_time"] = round(total_call_time, 2)

josn_obj = {'api_name':'chat_bot','db_url':uri,'session_id':session_id, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'chat_bot','db_url':uri,'session_id':session_id, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success',data=result)
except Exception as e:
job_status = "Failed"
@@ -301,8 +300,8 @@ async def chunk_entities(uri=Form(),userName=Form(), password=Form(), chunk_ids=
try:
logging.info(f"URI: {uri}, Username: {userName}, chunk_ids: {chunk_ids}")
result = await asyncio.to_thread(get_entities_from_chunkids,uri=uri, username=userName, password=password, chunk_ids=chunk_ids)
josn_obj = {'api_name':'chunk_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'chunk_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success',data=result)
except Exception as e:
job_status = "Failed"
@@ -329,8 +328,8 @@ async def graph_query(
password=password,
document_names=document_names
)
josn_obj = {'api_name':'graph_query','db_url':uri,'document_names':document_names, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'graph_query','db_url':uri,'document_names':document_names, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success', data=result)
except Exception as e:
job_status = "Failed"
@@ -361,10 +360,10 @@ async def clear_chat_bot(uri=Form(),userName=Form(), password=Form(), database=F
async def connect(uri=Form(), userName=Form(), password=Form(), database=Form()):
try:
graph = create_graph_database_connection(uri, userName, password, database)
result = await asyncio.to_thread(connection_check_and_get_vector_dimensions, graph)
josn_obj = {'api_name':'connect','db_url':uri,'status':result, 'count':1, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
return create_api_response('Success',data=result)
result = await asyncio.to_thread(connection_check, graph)
json_obj = {'api_name':'connect','db_url':uri,'status':result, 'count':1, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success',message=result)
except Exception as e:
job_status = "Failed"
message="Connection failed to connect Neo4j database"
@@ -379,8 +378,8 @@ async def upload_large_file_into_chunks(file:UploadFile = File(...), chunkNumber
try:
graph = create_graph_database_connection(uri, userName, password, database)
result = await asyncio.to_thread(upload_file, graph, model, file, chunkNumber, totalChunks, originalname, uri, CHUNK_DIR, MERGED_DIR)
josn_obj = {'api_name':'upload','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'upload','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
if int(chunkNumber) == int(totalChunks):
return create_api_response('Success',data=result, message='Source Node Created Successfully')
else:
@@ -401,8 +400,8 @@ async def get_structured_schema(uri=Form(), userName=Form(), password=Form(), da
graph = create_graph_database_connection(uri, userName, password, database)
result = await asyncio.to_thread(get_labels_and_relationtypes, graph)
logging.info(f'Schema result from DB: {result}')
josn_obj = {'api_name':'schema','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
json_obj = {'api_name':'schema','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success', data=result)
except Exception as e:
message="Unable to get the labels and relationtypes from neo4j database"
@@ -468,10 +467,10 @@ async def delete_document_and_entities(uri=Form(),
graph = create_graph_database_connection(uri, userName, password, database)
graphDb_data_Access = graphDBdataAccess(graph)
result, files_list_size = await asyncio.to_thread(graphDb_data_Access.delete_file_from_graph, filenames, source_types, deleteEntities, MERGED_DIR, uri)
# entities_count = result[0]['deletedEntities'] if 'deletedEntities' in result[0] else 0
message = f"Deleted {files_list_size} documents with entities from database"
josn_obj = {'api_name':'delete_document_and_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(josn_obj)
entities_count = result[0]['deletedEntities'] if 'deletedEntities' in result[0] else 0
message = f"Deleted {files_list_size} documents with {entities_count} entities from database"
json_obj = {'api_name':'delete_document_and_entities','db_url':uri, 'logging_time': formatted_time(datetime.now(timezone.utc))}
logger.log_struct(json_obj)
return create_api_response('Success',message=message)
except Exception as e:
job_status = "Failed"
@@ -627,4 +626,4 @@ async def merge_duplicate_nodes(uri=Form(), userName=Form(), password=Form(), da
gc.collect()

if __name__ == "__main__":
uvicorn.run(app)
uvicorn.run(app)
2 changes: 1 addition & 1 deletion frontend/src/components/Graph/GraphViewModal.tsx
@@ -356,4 +356,4 @@ const GraphViewModal: React.FunctionComponent<GraphViewModalProps> = ({
</>
);
};
export default GraphViewModal;
export default GraphViewModal;
2 changes: 1 addition & 1 deletion frontend/src/components/Graph/LegendsChip.tsx
@@ -5,7 +5,7 @@ import Legend from '../UI/Legend';
export const LegendsChip: React.FunctionComponent<LegendChipProps> = ({ scheme, title, nodes }) => {
const chunkcount = useMemo(
() => [...new Set(nodes?.filter((n) => n?.labels?.includes(title)).map((i) => i.id))].length,
[nodes]
[]
);

return <Legend title={title} chunkCount={chunkcount} bgColor={scheme[title]}></Legend>;
2 changes: 1 addition & 1 deletion frontend/src/components/Layout/PageLayout.tsx
@@ -175,4 +175,4 @@ export default function PageLayoutNew({
/>
</div>
);
}
}
2 changes: 1 addition & 1 deletion frontend/src/components/Popups/DeletePopUp/DeletePopUp.tsx
@@ -54,4 +54,4 @@ export default function DeletePopUp({
</Dialog.Actions>
</Dialog>
);
}
}
@@ -279,4 +279,4 @@ export default function DeletePopUpForOrphanNodes({
</Flex>
</div>
);
}
}
@@ -126,4 +126,4 @@ export default function GraphEnhancementDialog({
</Dialog.Content>
</Dialog>
);
}
}
@@ -100,4 +100,4 @@ const LargeFilesAlert: FC<LargefilesProps> = ({ largeFiles, handleToggle, checke
</Box>
);
};
export default LargeFilesAlert;
export default LargeFilesAlert;
5 changes: 4 additions & 1 deletion frontend/src/components/QuickStarter.tsx
@@ -11,6 +11,7 @@ const QuickStarter: React.FunctionComponent = () => {
const themeUtils = React.useContext(ThemeWrapperContext);
const [themeMode, setThemeMode] = useState<string>(themeUtils.colorMode);
const [showSettingsModal, setshowSettingsModal] = useState<boolean>(false);
const [showOrphanNodeDeletionDialog, setshowOrphanNodeDeletionDialog] = useState<boolean>(false);

const toggleColorMode = () => {
setThemeMode((prevThemeMode) => {
@@ -24,7 +25,6 @@ const QuickStarter: React.FunctionComponent = () => {
const closeSettingModal = () => {
setshowSettingsModal(false);
};

return (
<UserCredentialsWrapper>
<FileContextProvider>
Expand All @@ -35,6 +35,9 @@ const QuickStarter: React.FunctionComponent = () => {
openSettingsDialog={openSettingsModal}
isSettingPanelExpanded={showSettingsModal}
closeSettingModal={closeSettingModal}
closeOrphanNodeDeletionModal={closeOrphanNodeDeletionModal}
showOrphanNodeDeletionModal={showOrphanNodeDeletionDialog}
openOrphanNodeDeletionModal={openOrphanNodeDeletionModal}
/>
</AlertContextWrapper>
</MessageContextWrapper>