Skip to content

source list api showing files from different dbs #1283

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
May 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 0 additions & 8 deletions backend/score.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,14 +341,6 @@ async def get_source_list(
"""
try:
start = time.time()
# if password is not None and password != "null":
# decoded_password = decode_password(password)
# else:
# decoded_password = None
# userName = None
# database = None
# if " " in uri:
# uri = uri.replace(" ","+")
result = await asyncio.to_thread(get_source_list_from_graph,uri,userName,password,database)
end = time.time()
elapsed_time = end - start
Expand Down
32 changes: 16 additions & 16 deletions backend/src/graphDB_dataAccess.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ def update_exception_db(self, file_name, exp_msg, retry_condition=None):
if retry_condition is not None:
retry_condition = None
self.graph.query("""MERGE(d:Document {fileName :$fName}) SET d.status = $status, d.errorMessage = $error_msg, d.retry_condition = $retry_condition""",
{"fName":file_name, "status":job_status, "error_msg":exp_msg, "retry_condition":retry_condition})
{"fName":file_name, "status":job_status, "error_msg":exp_msg, "retry_condition":retry_condition},session_params={"database":self.graph._database})
else :
self.graph.query("""MERGE(d:Document {fileName :$fName}) SET d.status = $status, d.errorMessage = $error_msg""",
{"fName":file_name, "status":job_status, "error_msg":exp_msg})
{"fName":file_name, "status":job_status, "error_msg":exp_msg},session_params={"database":self.graph._database})
except Exception as e:
error_message = str(e)
logging.error(f"Error in updating document node status as failed: {error_message}")
Expand Down Expand Up @@ -66,7 +66,7 @@ def create_source_node(self, obj_source_node:sourceNode):
"entityEntityRelCount":obj_source_node.entityEntityRelCount,
"communityNodeCount":obj_source_node.communityNodeCount,
"communityRelCount":obj_source_node.communityRelCount
})
},session_params={"database":self.graph._database})
except Exception as e:
error_message = str(e)
logging.info(f"error_message = {error_message}")
Expand Down Expand Up @@ -118,7 +118,7 @@ def update_source_node(self, obj_source_node:sourceNode):
logging.info(f'Base Param value 1 : {param}')
query = "MERGE(d:Document {fileName :$props.fileName}) SET d += $props"
logging.info("Update source node properties")
self.graph.query(query,param)
self.graph.query(query,param,session_params={"database":self.graph._database})
except Exception as e:
error_message = str(e)
self.update_exception_db(self,self.file_name,error_message)
Expand All @@ -139,15 +139,15 @@ def get_source_list(self):
"""
logging.info("Get existing files list from graph")
query = "MATCH(d:Document) WHERE d.fileName IS NOT NULL RETURN d ORDER BY d.updatedAt DESC"
result = self.graph.query(query)
result = self.graph.query(query,session_params={"database":self.graph._database})
list_of_json_objects = [entry['d'] for entry in result]
return list_of_json_objects

def update_KNN_graph(self):
"""
Update the graph node with SIMILAR relationship where embedding score matches
"""
index = self.graph.query("""show indexes yield * where type = 'VECTOR' and name = 'vector'""")
index = self.graph.query("""show indexes yield * where type = 'VECTOR' and name = 'vector'""",session_params={"database":self.graph._database})
# logging.info(f'show index vector: {index}')
knn_min_score = os.environ.get('KNN_MIN_SCORE')
if len(index) > 0:
Expand All @@ -158,14 +158,14 @@ def update_KNN_graph(self):
WHERE node <> c and score >= $score MERGE (c)-[rel:SIMILAR]-(node) SET rel.score = score
""",
{"score":float(knn_min_score)}
)
,session_params={"database":self.graph._database})
else:
logging.info("Vector index does not exist, So KNN graph not update")

def check_account_access(self, database):
try:
query_dbms_componenet = "call dbms.components() yield edition"
result_dbms_componenet = self.graph.query(query_dbms_componenet)
result_dbms_componenet = self.graph.query(query_dbms_componenet,session_params={"database":self.graph._database})

if result_dbms_componenet[0]["edition"] == "enterprise":
query = """
Expand All @@ -177,7 +177,7 @@ def check_account_access(self, database):

logging.info(f"Checking access for database: {database}")

result = self.graph.query(query, params={"database": database})
result = self.graph.query(query, params={"database": database},session_params={"database":self.graph._database})
read_access_count = result[0]["readAccessCount"] if result else 0

logging.info(f"Read access count: {read_access_count}")
Expand All @@ -202,7 +202,7 @@ def check_gds_version(self):
gds_procedure_count = """
SHOW FUNCTIONS YIELD name WHERE name STARTS WITH 'gds.version' RETURN COUNT(*) AS totalGdsProcedures
"""
result = self.graph.query(gds_procedure_count)
result = self.graph.query(gds_procedure_count,session_params={"database":self.graph._database})
total_gds_procedures = result[0]['totalGdsProcedures'] if result else 0

if total_gds_procedures > 0:
Expand Down Expand Up @@ -231,11 +231,11 @@ def connection_check_and_get_vector_dimensions(self,database):
db_vector_dimension = self.graph.query("""SHOW INDEXES YIELD *
WHERE type = 'VECTOR' AND name = 'vector'
RETURN options.indexConfig['vector.dimensions'] AS vector_dimensions
""")
""",session_params={"database":self.graph._database})

result_chunks = self.graph.query("""match (c:Chunk) return size(c.embedding) as embeddingSize, count(*) as chunks,
count(c.embedding) as hasEmbedding
""")
""",session_params={"database":self.graph._database})

embedding_model = os.getenv('EMBEDDING_MODEL')
embeddings, application_dimension = load_embedding_model(embedding_model)
Expand All @@ -260,7 +260,7 @@ def execute_query(self, query, param=None,max_retries=3, delay=2):
retries = 0
while retries < max_retries:
try:
return self.graph.query(query, param)
return self.graph.query(query, param,session_params={"database":self.graph._database})
except TransientError as e:
if "DeadlockDetected" in str(e):
retries += 1
Expand Down Expand Up @@ -473,8 +473,8 @@ def drop_create_vector_index(self, isVectorIndexExist):
embeddings, dimension = load_embedding_model(embedding_model)

if isVectorIndexExist == 'true':
self.graph.query("""drop index vector""")
# self.graph.query("""drop index vector""")
self.graph.query("""drop index vector""",session_params={"database":self.graph._database})

self.graph.query("""CREATE VECTOR INDEX `vector` if not exists for (c:Chunk) on (c.embedding)
OPTIONS {indexConfig: {
`vector.dimensions`: $dimensions,
Expand All @@ -483,7 +483,7 @@ def drop_create_vector_index(self, isVectorIndexExist):
""",
{
"dimensions" : dimension
}
},session_params={"database":self.graph._database}
)
return "Drop and Re-Create vector index succesfully"

Expand Down