Skip to content

Commit 21cf0ea

Browse files
staging fix
1 parent 437fc3d commit 21cf0ea

File tree

1 file changed

+0
-13
lines changed

1 file changed

+0
-13
lines changed

backend/src/main.py

Lines changed: 0 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -24,7 +24,6 @@
2424
from src.make_relationships import *
2525
from src.document_sources.web_pages import *
2626
from src.graph_query import get_graphDB_driver
27-
from src.graph_query import get_graphDB_driver
2827
import re
2928
from langchain_community.document_loaders import WikipediaLoader, WebBaseLoader
3029
import warnings
@@ -402,7 +401,6 @@ async def processing_source(uri, userName, password, database, model, file_name,
402401
obj_source_node.processing_time = processed_time
403402
obj_source_node.processed_chunk = select_chunks_upto+select_chunks_with_retry
404403
if retry_condition == START_FROM_BEGINNING:
405-
result = execute_graph_query(graph,QUERY_TO_GET_NODES_AND_RELATIONS_OF_A_DOCUMENT, params={"filename":file_name})
406404
result = execute_graph_query(graph,QUERY_TO_GET_NODES_AND_RELATIONS_OF_A_DOCUMENT, params={"filename":file_name})
407405
obj_source_node.node_count = result[0]['nodes']
408406
obj_source_node.relationship_count = result[0]['rels']
@@ -506,10 +504,6 @@ async def processing_chunks(chunkId_chunkDoc_list,graph,uri, userName, password,
506504
logging.info(f'Time taken to create relationship between chunk and entities: {elapsed_relationship:.2f} seconds')
507505
latency_processing_chunk["relationship_between_chunk_entity"] = f'{elapsed_relationship:.2f}'
508506

509-
graphDb_data_Access = graphDBdataAccess(graph)
510-
count_response = graphDb_data_Access.update_node_relationship_count(file_name)
511-
node_count = count_response[file_name].get('nodeCount',"0")
512-
rel_count = count_response[file_name].get('relationshipCount',"0")
513507
graphDb_data_Access = graphDBdataAccess(graph)
514508
count_response = graphDb_data_Access.update_node_relationship_count(file_name)
515509
node_count = count_response[file_name].get('nodeCount',"0")
@@ -536,7 +530,6 @@ def get_chunkId_chunkDoc_list(graph, file_name, pages, token_chunk_size, chunk_o
536530
else:
537531
chunkId_chunkDoc_list=[]
538532
chunks = execute_graph_query(graph,QUERY_TO_GET_CHUNKS, params={"filename":file_name})
539-
chunks = execute_graph_query(graph,QUERY_TO_GET_CHUNKS, params={"filename":file_name})
540533

541534
if chunks[0]['text'] is None or chunks[0]['text']=="" or not chunks :
542535
raise LLMGraphBuilderException(f"Chunks are not created for {file_name}. Please re-upload file and try again.")
@@ -548,13 +541,11 @@ def get_chunkId_chunkDoc_list(graph, file_name, pages, token_chunk_size, chunk_o
548541
if retry_condition == START_FROM_LAST_PROCESSED_POSITION:
549542
logging.info(f"Retry : start_from_last_processed_position")
550543
starting_chunk = execute_graph_query(graph,QUERY_TO_GET_LAST_PROCESSED_CHUNK_POSITION, params={"filename":file_name})
551-
starting_chunk = execute_graph_query(graph,QUERY_TO_GET_LAST_PROCESSED_CHUNK_POSITION, params={"filename":file_name})
552544

553545
if starting_chunk and starting_chunk[0]["position"] < len(chunkId_chunkDoc_list):
554546
return len(chunks), chunkId_chunkDoc_list[starting_chunk[0]["position"] - 1:]
555547

556548
elif starting_chunk and starting_chunk[0]["position"] == len(chunkId_chunkDoc_list):
557-
starting_chunk = execute_graph_query(graph,QUERY_TO_GET_LAST_PROCESSED_CHUNK_WITHOUT_ENTITY, params={"filename":file_name})
558549
starting_chunk = execute_graph_query(graph,QUERY_TO_GET_LAST_PROCESSED_CHUNK_WITHOUT_ENTITY, params={"filename":file_name})
559550
return len(chunks), chunkId_chunkDoc_list[starting_chunk[0]["position"] - 1:]
560551

@@ -734,7 +725,6 @@ def manually_cancelled_job(graph, filenames, source_types, merged_dir, uri):
734725
delete_uploaded_local_file(merged_file_path,file_name)
735726
return "Cancelled the processing job successfully"
736727

737-
def populate_graph_schema_from_text(text, model, is_schema_description_checked, is_local_storage):
738728
def populate_graph_schema_from_text(text, model, is_schema_description_checked, is_local_storage):
739729
"""_summary_
740730
@@ -748,8 +738,6 @@ def populate_graph_schema_from_text(text, model, is_schema_description_checked,
748738
"""
749739
result = schema_extraction_from_text(text, model, is_schema_description_checked, is_local_storage)
750740
return result
751-
result = schema_extraction_from_text(text, model, is_schema_description_checked, is_local_storage)
752-
return result
753741

754742
def set_status_retry(graph, file_name, retry_condition):
755743
graphDb_data_Access = graphDBdataAccess(graph)
@@ -762,7 +750,6 @@ def set_status_retry(graph, file_name, retry_condition):
762750
if retry_condition == DELETE_ENTITIES_AND_START_FROM_BEGINNING or retry_condition == START_FROM_BEGINNING:
763751
obj_source_node.processed_chunk=0
764752
if retry_condition == DELETE_ENTITIES_AND_START_FROM_BEGINNING:
765-
execute_graph_query(graph,QUERY_TO_DELETE_EXISTING_ENTITIES, params={"filename":file_name})
766753
execute_graph_query(graph,QUERY_TO_DELETE_EXISTING_ENTITIES, params={"filename":file_name})
767754
obj_source_node.node_count=0
768755
obj_source_node.relationship_count=0

0 commit comments

Comments (0)