
v0.8.1 fix release #1280


Merged · 26 commits · May 12, 2025

Commits:
f46ebc2  Dev to staging (#1070) - praveshkumar1988, Feb 12, 2025
77160ea  Dev (#1073) - prakriti-solankey, Feb 13, 2025
703f307  Dev (#1085) - kartikpersistent, Feb 13, 2025
5443c51  Update docker-compose.yml - kartikpersistent, Feb 16, 2025
e7a4ee1  Merge branch 'main' into staging - prakriti-solankey, Feb 17, 2025
f000501  Dev (#1095) - kartikpersistent, Feb 17, 2025
e2d0039  Dev to staging minor fixes (#1103) - praveshkumar1988, Feb 20, 2025
bd54178  Merge branch 'main' into staging - kartikpersistent, Feb 20, 2025
7818f6f  Dev to staging (#1108) - prakriti-solankey, Feb 21, 2025
a005186  Dev (#1132) - prakriti-solankey, Mar 6, 2025
56dacdd  bracket missing - prakriti-solankey, Mar 6, 2025
baa914e  dev (#1156) - kartikpersistent, Mar 6, 2025
a0fae04  Dev (#1172) - prakriti-solankey, Mar 11, 2025
2bdddea  Merge branch 'main' into staging - prakriti-solankey, Mar 11, 2025
83073de  Dev to Staging (#1210) - kaustubh-darekar, Apr 1, 2025
834f68f  Update PageLayout.tsx - kartikpersistent, Apr 4, 2025
01cac9c  queue type fix - kartikpersistent, Apr 7, 2025
8bfb595  Update requirements.txt - karanchellani, Apr 8, 2025
f2e72dc  Dev (#1218) - kartikpersistent, Apr 17, 2025
9fc71b5  Dev (#1239) - kartikpersistent, Apr 21, 2025
91b78b7  Dev (#1244) - kartikpersistent, Apr 21, 2025
00109d3  Merge branch 'main' into staging - kartikpersistent, Apr 21, 2025
2f8b31d  Dev to Staging (#1279) - praveshkumar1988, May 9, 2025
02de002  staging bug - prakriti-solankey, May 9, 2025
437fc3d  Merge branch 'main' of https://github.com/neo4j-labs/llm-graph-builde… - prakriti-solankey, May 12, 2025
21cf0ea  staging fix - prakriti-solankey, May 12, 2025
5 changes: 3 additions & 2 deletions backend/Dockerfile
@@ -16,9 +16,10 @@ RUN apt-get update && \
# Set LD_LIBRARY_PATH
ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
# Copy requirements file and install Python dependencies
COPY requirements.txt /code/
COPY requirements.txt constraints.txt /code/
# --no-cache-dir --upgrade
RUN pip install -r requirements.txt
RUN pip install --upgrade pip
RUN pip install -r requirements.txt -c constraints.txt
# Copy application code
COPY . /code
# Set command
4 changes: 4 additions & 0 deletions backend/constraints.txt
@@ -0,0 +1,4 @@
-f https://download.pytorch.org/whl/torch_stable.html
torch==2.3.1+cpu
torchvision==0.18.1+cpu
torchaudio==2.3.1+cpu
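
The constraints file pins CPU-only PyTorch wheels so the backend image does not pull CUDA builds. As a rough illustration (not part of this diff), a quick post-install check could confirm that the pinned wheels are what actually landed in the image:

```python
# Illustrative check only, not part of this PR: verify the CPU-only wheels
# pinned in constraints.txt are the ones installed in the backend image.
import torch

print(torch.__version__)           # expected to report 2.3.1+cpu
print(torch.cuda.is_available())   # expected False for the +cpu builds
```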
5 changes: 2 additions & 3 deletions backend/score.py
@@ -31,11 +31,10 @@
from src.ragas_eval import *
from starlette.types import ASGIApp, Receive, Scope, Send
from langchain_neo4j import Neo4jGraph
from src.entities.source_node import sourceNode
from starlette.middleware.sessions import SessionMiddleware
from starlette.responses import HTMLResponse, RedirectResponse,JSONResponse
from starlette.requests import Request
import secrets
from dotenv import load_dotenv
load_dotenv(override=True)

logger = CustomLogger()
CHUNK_DIR = os.path.join(os.path.dirname(__file__), "chunks")
5 changes: 4 additions & 1 deletion backend/src/llm.py
@@ -180,13 +180,16 @@ async def get_graph_document_list(
else:
node_properties = ["description"]
relationship_properties = ["description"]
TOOL_SUPPORTED_MODELS = {"qwen3", "deepseek"}
model_name = llm.model_name.lower()
ignore_tool_usage = not any(pattern in model_name for pattern in TOOL_SUPPORTED_MODELS)
llm_transformer = LLMGraphTransformer(
llm=llm,
node_properties=node_properties,
relationship_properties=relationship_properties,
allowed_nodes=allowedNodes,
allowed_relationships=allowedRelationship,
ignore_tool_usage=True,
ignore_tool_usage=ignore_tool_usage,
additional_instructions=ADDITIONAL_INSTRUCTIONS+ (additional_instructions if additional_instructions else "")
)
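
For context (not part of the diff), a standalone sketch of the tool-usage check added above; the helper function name is illustrative. Previously ignore_tool_usage was hard-coded to True; this change re-enables structured tool calling for models such as qwen3 and deepseek.

```python
# Standalone sketch of the model check introduced in llm.py: tool/function
# calling stays enabled only for model names matching a known pattern.
TOOL_SUPPORTED_MODELS = {"qwen3", "deepseek"}

def should_ignore_tool_usage(model_name: str) -> bool:
    # True when none of the supported patterns appear in the lowercased name.
    name = model_name.lower()
    return not any(pattern in name for pattern in TOOL_SUPPORTED_MODELS)

print(should_ignore_tool_usage("openai_gpt_4o"))  # True  -> prompt-based extraction
print(should_ignore_tool_usage("deepseek-chat"))  # False -> tool calling kept on
```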

67 changes: 34 additions & 33 deletions backend/src/main.py
@@ -665,39 +665,40 @@ def upload_file(graph, model, chunk, chunk_number:int, total_chunks:int, origina
return f"Chunk {chunk_number}/{total_chunks} saved"

def get_labels_and_relationtypes(uri, userName, password, database):
excluded_labels = {'Document', 'Chunk', '_Bloom_Perspective_', '__Community__', '__Entity__', 'Session', 'Message'}
excluded_relationships = {
'PART_OF', 'NEXT_CHUNK', 'HAS_ENTITY', '_Bloom_Perspective_', 'FIRST_CHUNK',
'SIMILAR', 'IN_COMMUNITY', 'PARENT_COMMUNITY', 'NEXT', 'LAST_MESSAGE'}
driver = get_graphDB_driver(uri, userName, password,database)
with driver.session(database=database) as session:
result = session.run("CALL db.schema.visualization() YIELD nodes, relationships RETURN nodes, relationships")
if not result:
return []
record = result.single()
nodes = record["nodes"]
relationships = record["relationships"]
node_map = {}
for node in nodes:
node_id = node.element_id
labels = list(node.labels)
if labels:
node_map[node_id] = ":".join(labels)
triples = []
for rel in relationships:
start_id = rel.start_node.element_id
end_id = rel.end_node.element_id
rel_type = rel.type
start_label = node_map.get(start_id)
end_label = node_map.get(end_id)
if start_label and end_label:
if (
start_label not in excluded_labels and
end_label not in excluded_labels and
rel_type not in excluded_relationships
):
triples.append(f"{start_label}-{rel_type}->{end_label}")
return {"triplets" : list(set(triples))}
excluded_labels = {'Document', 'Chunk', '_Bloom_Perspective_', '__Community__', '__Entity__', 'Session', 'Message'}
excluded_relationships = {
'NEXT_CHUNK', '_Bloom_Perspective_', 'FIRST_CHUNK',
'SIMILAR', 'IN_COMMUNITY', 'PARENT_COMMUNITY', 'NEXT', 'LAST_MESSAGE'
}
driver = get_graphDB_driver(uri, userName, password,database)
triples = set()
with driver.session(database=database) as session:
result = session.run("""
MATCH (n)-[r]->(m)
RETURN DISTINCT labels(n) AS fromLabels, type(r) AS relType, labels(m) AS toLabels
""")
for record in result:
from_labels = record["fromLabels"]
to_labels = record["toLabels"]
rel_type = record["relType"]
from_label = next((lbl for lbl in from_labels if lbl not in excluded_labels), None)
to_label = next((lbl for lbl in to_labels if lbl not in excluded_labels), None)
if not from_label or not to_label:
continue
if rel_type == 'PART_OF':
if from_label == 'Chunk' and to_label == 'Document':
continue
elif rel_type == 'HAS_ENTITY':
if from_label == 'Chunk':
continue
elif (
from_label in excluded_labels or
to_label in excluded_labels or
rel_type in excluded_relationships
):
continue
triples.add(f"{from_label}-{rel_type}->{to_label}")
return {"triplets": list(triples)}

def manually_cancelled_job(graph, filenames, source_types, merged_dir, uri):

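For context (not part of the diff), a simplified standalone sketch of what the rewritten schema lookup does, assuming the official neo4j Python driver and placeholder connection details; it omits the PART_OF/HAS_ENTITY special cases handled in the actual function above.

```python
# Illustrative sketch: collect distinct (label)-[REL]->(label) triples the same
# way the rewritten get_labels_and_relationtypes does, minus the special cases.
from neo4j import GraphDatabase

EXCLUDED_LABELS = {'Document', 'Chunk', '_Bloom_Perspective_', '__Community__',
                   '__Entity__', 'Session', 'Message'}

driver = GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", "password"))  # placeholders
triples = set()
with driver.session(database="neo4j") as session:
    result = session.run(
        "MATCH (n)-[r]->(m) "
        "RETURN DISTINCT labels(n) AS fromLabels, type(r) AS relType, labels(m) AS toLabels"
    )
    for record in result:
        from_label = next((lbl for lbl in record["fromLabels"] if lbl not in EXCLUDED_LABELS), None)
        to_label = next((lbl for lbl in record["toLabels"] if lbl not in EXCLUDED_LABELS), None)
        if from_label and to_label:
            triples.add(f"{from_label}-{record['relType']}->{to_label}")
driver.close()
print(sorted(triples))
```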
9 changes: 7 additions & 2 deletions backend/src/make_relationships.py
@@ -1,6 +1,7 @@
from langchain_neo4j import Neo4jGraph
from langchain.docstore.document import Document
from src.shared.common_fn import load_embedding_model,execute_graph_query
import logging
from typing import List
import os
@@ -34,6 +35,7 @@ def merge_relationship_between_chunk_and_entites(graph: Neo4jGraph, graph_docume
MERGE (c)-[:HAS_ENTITY]->(n)
"""
execute_graph_query(graph,unwind_query, params={"batch_data": batch_data})


def create_chunk_embeddings(graph, chunkId_chunkDoc_list, file_name):
@@ -60,6 +62,7 @@ def create_chunk_embeddings(graph, chunkId_chunkDoc_list, file_name):
MERGE (c)-[:PART_OF]->(d)
"""
execute_graph_query(graph,query_to_create_embedding, params={"fileName":file_name, "data":data_for_query})

def create_relation_between_chunks(graph, file_name, chunks: List[Document])->list:
logging.info("creating FIRST_CHUNK and NEXT_CHUNK relationships between chunks")
@@ -128,6 +131,7 @@ def create_relation_between_chunks(graph, file_name, chunks: List[Document])->li
MERGE (c)-[:PART_OF]->(d)
"""
execute_graph_query(graph,query_to_create_chunk_and_PART_OF_relation, params={"batch_data": batch_data})

query_to_create_FIRST_relation = """
UNWIND $relationships AS relationship
@@ -137,6 +141,7 @@ def create_relation_between_chunks(graph, file_name, chunks: List[Document])->li
MERGE (d)-[:FIRST_CHUNK]->(c))
"""
execute_graph_query(graph,query_to_create_FIRST_relation, params={"f_name": file_name, "relationships": relationships})

query_to_create_NEXT_CHUNK_relation = """
UNWIND $relationships AS relationship
@@ -153,7 +158,7 @@ def create_relation_between_chunks(graph, file_name, chunks: List[Document])->li
def create_chunk_vector_index(graph):
start_time = time.time()
try:
vector_index_query = "SHOW INDEXES YIELD * WHERE labelsOrTypes = ['Chunk'] and type = 'VECTOR' AND name = 'vector' return options"
vector_index_query = "SHOW INDEXES YIELD name, type, labelsOrTypes, properties WHERE name = 'vector' AND type = 'VECTOR' AND 'Chunk' IN labelsOrTypes AND 'embedding' IN properties RETURN name"
vector_index = execute_graph_query(graph,vector_index_query)
if not vector_index:
vector_store = Neo4jVector(embedding=EMBEDDING_FUNCTION,
@@ -168,7 +173,7 @@ def create_chunk_vector_index(graph):
else:
logging.info(f"Index already exist,Skipping creation. Time taken: {time.time() - start_time:.2f} seconds")
except Exception as e:
if "EquivalentSchemaRuleAlreadyExists" in str(e):
if ("EquivalentSchemaRuleAlreadyExists" in str(e) or "An equivalent index already exists" in str(e)):
logging.info("Vector index already exists, skipping creation.")
else:
raise
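
For context (not part of the diff), a simplified sketch of the tightened index check above, using the neo4j Python driver directly with placeholder connection details. The broadened except clause above also treats Neo4j's newer "An equivalent index already exists" message as a benign duplicate rather than a hard failure.

```python
# Illustrative sketch: probe for the 'vector' index on Chunk.embedding with the
# same SHOW INDEXES filter the updated create_chunk_vector_index uses.
from neo4j import GraphDatabase

INDEX_CHECK = (
    "SHOW INDEXES YIELD name, type, labelsOrTypes, properties "
    "WHERE name = 'vector' AND type = 'VECTOR' "
    "AND 'Chunk' IN labelsOrTypes AND 'embedding' IN properties "
    "RETURN name"
)

driver = GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", "password"))  # placeholders
with driver.session() as session:
    records = session.run(INDEX_CHECK).data()
if records:
    print("vector index already exists, skipping creation")
else:
    print("vector index missing, would be created here")
driver.close()
```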
32 changes: 18 additions & 14 deletions frontend/index.html
@@ -1,16 +1,20 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" type="image/png" sizes="32x32" href="/favicons/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="194x194" href="/favicons/favicon-194x194.png">
<link rel="icon" type="image/png" sizes="16x16" href="/favicons/favicon-16x16.png">
<link rel="shortcut icon" href="/favicons/favicon.ico">
<title>Neo4j graph builder</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

10 changes: 5 additions & 5 deletions frontend/package.json
@@ -14,17 +14,17 @@
},
"dependencies": {
"@auth0/auth0-react": "^2.2.4",
"@emotion/styled": "^11.11.0",
"@emotion/styled": "^11.14.0",
"@mui/material": "^5.15.10",
"@mui/styled-engine": "^7.0.1",
"@mui/styled-engine": "^7.0.2",
"@neo4j-devtools/word-color": "^0.0.8",
"@neo4j-ndl/base": "^3.2.9",
"@neo4j-ndl/react": "^3.2.18",
"@neo4j-nvl/base": "^0.3.6",
"@neo4j-nvl/react": "^0.3.7",
"@react-oauth/google": "^0.12.1",
"@tanstack/react-table": "^8.20.5",
"@types/uuid": "^9.0.7",
"@types/uuid": "^10.0.0",
"axios": "^1.8.4",
"clsx": "^2.1.1",
"eslint-plugin-react": "^7.37.4",
@@ -50,13 +50,13 @@
"eslint": "^8.45.0",
"eslint-config-prettier": "^10.1.1",
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"eslint-plugin-react-refresh": "^0.4.20",
"husky": "^9.1.7",
"lint-staged": "^15.5.0",
"postcss": "^8.5.3",
"prettier": "^3.5.3",
"react-dropzone": "^14.3.8",
"tailwindcss": "^4.0.7",
"tailwindcss": "^4.1.5",
"typescript": "^5.7.3",
"vite": "^4.5.3"
}
17 changes: 9 additions & 8 deletions frontend/src/components/ChatBot/ChatInfoModal.tsx
@@ -81,10 +81,10 @@ const ChatInfoModal: React.FC<chatInfoMessage> = ({
error?.length
? 10
: mode === chatModeLables['global search+vector+fulltext']
? 7
: mode === chatModeLables.graph
? 4
: 3
);
const [, copy] = useCopyToClipboard();
const [copiedText, setcopiedText] = useState<boolean>(false);
@@ -97,15 +97,15 @@ const ChatInfoModal: React.FC<chatInfoMessage> = ({
multiModelMetrics.length > 0 && Object.keys(multiModelMetrics[0]).length > 4
? true
: multiModelMetrics.length > 0 && Object.keys(multiModelMetrics[0]).length <= 4
? false
: null
);
const [isAdditionalMetricsWithSingleMode, setIsAdditionalMetricsWithSingleMode] = useState<boolean | null>(
metricDetails != undefined && Object.keys(metricDetails).length > 3
? true
: metricDetails != undefined && Object.keys(metricDetails).length <= 3
? false
: null
);
const actions: React.ComponentProps<typeof IconButton<'button'>>[] = useMemo(
() => [
@@ -320,6 +320,7 @@ const ChatInfoModal: React.FC<chatInfoMessage> = ({
src={Neo4jRetrievalLogo}
style={{ width: isTablet ? 80 : 95, height: isTablet ? 80 : 95, marginRight: 10 }}
loading='lazy'
alt='Retrieval-logo'
/>
<div className='flex! flex-col'>
<Typography variant='h2'>Retrieval information</Typography>
13 changes: 8 additions & 5 deletions frontend/src/components/ChatBot/ChatOnlyComponent.tsx
@@ -10,6 +10,7 @@ import { clearChatAPI } from '../../services/QnaAPI';
import { ChatProps, connectionState, Messages, UserCredentials } from '../../types';
import { getIsLoading } from '../../utils/Utils';
import ThemeWrapper from '../../context/ThemeWrapper';
import { SpotlightProvider } from '@neo4j-ndl/react';

const ChatContent: React.FC<ChatProps> = ({ chatMessages }) => {
const { clearHistoryData, messages, setMessages, setClearHistoryData, setIsDeleteChatLoading, isDeleteChatLoading } =
@@ -160,11 +161,13 @@ const ChatOnlyComponent: React.FC = () => {
return (
<ThemeWrapper>
<UserCredentialsWrapper>
<FileContextProvider>
<MessageContextWrapper>
<ChatContent chatMessages={chatMessages} />
</MessageContextWrapper>
</FileContextProvider>
<SpotlightProvider>
<FileContextProvider>
<MessageContextWrapper>
<ChatContent chatMessages={chatMessages} />
</MessageContextWrapper>
</FileContextProvider>
</SpotlightProvider>
</UserCredentialsWrapper>
</ThemeWrapper>
);
13 changes: 9 additions & 4 deletions frontend/src/components/ChatBot/ChunkInfo.tsx
@@ -84,7 +84,7 @@ const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
) : chunk?.url && chunk?.start_time ? (
<>
<div className='flex! flex-row justiy-between items-center gap-1'>
<img src={youtubelogo} width={20} height={20} className='mr-2' />
<img src={youtubelogo} width={20} height={20} className='mr-2' alt='youtube-source-logo' />
<TextLink
href={generateYouTubeLink(chunk?.url, chunk?.start_time)}
type={'external'}
@@ -120,7 +120,7 @@ const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
) : chunk?.url && new URL(chunk.url).host === 'wikipedia.org' ? (
<>
<div className='flex! flex-row justiy-between items-center gap-1'>
<img src={wikipedialogo} width={20} height={20} className='mr-2' />
<img src={wikipedialogo} width={20} height={20} className='mr-2' alt='wikipedia-source-logo' />
<Typography variant='subheading-medium'>{chunk?.fileName}</Typography>
</div>
{mode !== chatModeLables['global search+vector+fulltext'] &&
@@ -147,7 +147,7 @@ const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
) : chunk?.url && new URL(chunk.url).host === 'storage.googleapis.com' ? (
<>
<div className='flex! flex-row justiy-between items-center gap-1'>
<img src={gcslogo} width={20} height={20} className='mr-2' />
<img src={gcslogo} width={20} height={20} className='mr-2' alt='gcs-source-logo' />
<Typography variant='subheading-medium'>{chunk?.fileName}</Typography>
</div>
{mode !== chatModeLables['global search+vector+fulltext'] &&
@@ -172,7 +172,7 @@ const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
) : chunk?.url && chunk?.url.startsWith('s3://') ? (
<>
<div className='flex! flex-row justiy-between items-center gap-1'>
<img src={s3logo} width={20} height={20} className='mr-2' />
<img src={s3logo} width={20} height={20} className='mr-2' alt='s3-source-logo' />
<Typography variant='subheading-medium'>{chunk?.fileName}</Typography>
</div>
{mode !== chatModeLables['global search+vector+fulltext'] &&
@@ -264,6 +264,11 @@ const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
{chunk?.text}
</ReactMarkdown>
</div>
<div className='mt-2 prose prose-sm sm:prose lg:prose-lg xl:prose-xl max-w-none'>
<ReactMarkdown remarkPlugins={[remarkGfm]} rehypePlugins={[rehypeRaw] as any}>
{chunk?.text}
</ReactMarkdown>
</div>
</li>
))}
</ul>