Skip to content

Commit 7e944e1

Browse files
Merge pull request #73 from kunalverma2512/feature/conversation-history-rag-enhancement
conversation-history-rag-enhancement
2 parents f7890c5 + 179e176 commit 7e944e1

File tree

76 files changed

+4768
-450
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

76 files changed

+4768
-450
lines changed

.env

126 Bytes
Binary file not shown.

WellnessResourceHub.py

Lines changed: 0 additions & 140 deletions
This file was deleted.

backend/app/models/schemas.py

Lines changed: 62 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from pydantic import BaseModel, Field
22
from typing import List, Optional, Dict, Any
33

4+
# Existing schemas (keep unchanged)
45
class IngestionRequest(BaseModel):
56
file_paths: List[str] = Field(..., description="List of file paths to ingest")
67
chunking_strategy: str = Field("medical", description="Chunking strategy")
@@ -14,21 +15,74 @@ class IngestionResponse(BaseModel):
1415
documents_stored: int
1516
error: Optional[str] = None
1617

18+
class SystemStatusResponse(BaseModel):
    """Health snapshot of the backend's core components.

    Returned by the system-status endpoint so clients can tell whether
    ingestion/querying is currently possible.
    """
    status: str  # overall status label (exact vocabulary not shown here — confirm against the endpoint)
    vector_store_connected: bool  # True when the vector store connection is up
    embedding_model_loaded: bool  # True when the embedding model is loaded in memory
    documents_count: int  # number of documents currently held by the vector store
    system_ready: bool  # aggregate readiness flag; NOTE(review): aggregation rule defined by the caller — confirm
24+
25+
# UPDATED: Enhanced MedicalQueryRequest with session support
class MedicalQueryRequest(BaseModel):
    """Request payload for a medical query.

    `session_id` is optional: when supplied, the query is answered within an
    existing conversation session; when omitted, the query stands alone.
    """
    query: str = Field(..., description="Medical query", min_length=1)
    max_chunks: Optional[int] = Field(3, description="Max chunks to retrieve")
    session_id: Optional[str] = Field(None, description="Conversation session ID")
2030

31+
# UPDATED: Enhanced MedicalQueryResponse with session info
class MedicalQueryResponse(BaseModel):
    """Response payload for a medical query, enriched with session metadata.

    Most fields carry defaults so partially-populated responses (e.g. error
    paths) still validate; `processing_time` is retained only for backward
    compatibility with pre-session clients.
    """
    success: bool  # whether the query was handled without error
    query: str  # echo of the submitted query text
    response: str  # generated answer text
    session_id: str  # conversation session this response belongs to
    conversation_context_used: bool = Field(False, description="Whether conversation context was used")
    sources: List[str] = []  # source references backing the answer (pydantic copies mutable defaults per instance)
    urgency_level: str = "routine"  # triage label; other possible values not shown here — confirm
    chunks_used: int = 0  # number of retrieved chunks fed into generation
    generation_time: float = 0.0  # time spent generating the answer; presumably seconds — confirm
    model_used: str = "gemini-1.5-flash"  # identifier of the LLM that produced the answer
    safety_validated: bool = True  # whether the answer passed safety validation
    emergency_detected: bool = False  # whether the query was flagged as an emergency
    timestamp: str  # response timestamp; NOTE(review): format not shown — presumably ISO 8601, confirm
    processing_time: float = 0.0  # For backward compatibility
    error: Optional[str] = None  # error message when success is False, else None
2848

29-
class SystemStatusResponse(BaseModel):
30-
status: str
31-
vector_store_connected: bool
32-
embedding_model_loaded: bool
33-
documents_count: int
34-
system_ready: bool
49+
# NEW: Session management schemas
class SessionCreateResponse(BaseModel):
    """Response returned after creating a new conversation session."""
    session_id: str  # identifier of the freshly created session
    message: str = "New conversation session created"  # human-readable confirmation
53+
54+
class SessionHistoryResponse(BaseModel):
    """Full message history of one conversation session."""
    session_id: str  # session being described
    messages: List[Dict[str, Any]]  # raw message dicts; presumably shaped like ChatMessage — confirm against the session store
    message_count: int  # number of entries in `messages`
    created_at: str  # session creation time; format not shown — presumably ISO 8601
    updated_at: str  # last-modification time; same format caveat as created_at
60+
61+
class SessionStatusResponse(BaseModel):
    """Existence/activity check for a conversation session.

    Defaults cover the "session not found" case: `exists=False` with
    zero messages and no last activity.
    """
    session_id: str  # session that was queried
    exists: bool  # whether the session is known to the backend
    message_count: int = 0  # number of stored messages (0 when the session does not exist)
    last_activity: Optional[str] = None  # timestamp of the most recent message, None if none/unknown
66+
67+
class ChatMessage(BaseModel):
    """A single message within a conversation session."""
    role: str  # 'user' or 'assistant'
    content: str  # message text
    timestamp: str  # when the message was recorded; format not shown — presumably ISO 8601
    metadata: Optional[Dict[str, Any]] = None  # optional free-form extras attached to the message
72+
73+
# NEW: Unified chat request (works with or without session)
class ChatRequest(BaseModel):
    """Request payload for the unified chat endpoint.

    Works both stateless (no `session_id`) and stateful (existing
    `session_id` for conversation continuity).
    """
    query: str = Field(..., min_length=1, max_length=1000, description="User's message")
    session_id: Optional[str] = Field(None, description="Optional session ID for conversation continuity")
    max_chunks: Optional[int] = Field(3, description="Max retrieval chunks")
78+
79+
class ChatResponse(BaseModel):
    """Response payload for the unified chat endpoint.

    A slimmer counterpart to MedicalQueryResponse: same core fields
    (success/query/response/session), without the safety and model
    metadata.
    """
    success: bool  # whether the chat turn was handled without error
    query: str  # echo of the submitted message
    response: str  # generated assistant reply
    session_id: str  # session this turn belongs to (created server-side if none was supplied — confirm)
    conversation_context_used: bool = False  # whether prior conversation history influenced the reply
    sources: List[str] = []  # source references backing the reply
    generation_time: float  # time spent generating; presumably seconds — confirm
    timestamp: str  # reply timestamp; format not shown — presumably ISO 8601
    error: Optional[str] = None  # error message when success is False, else None

0 commit comments

Comments
 (0)