Vela committed on
Commit
d091eda
·
1 Parent(s): 4a7a05f

modified docstrings

src/backend/__pycache__/main.cpython-313.pyc CHANGED
Binary files a/src/backend/__pycache__/main.cpython-313.pyc and b/src/backend/__pycache__/main.cpython-313.pyc differ
 
src/backend/api_routes/__pycache__/chat_api.cpython-313.pyc CHANGED
Binary files a/src/backend/api_routes/__pycache__/chat_api.cpython-313.pyc and b/src/backend/api_routes/__pycache__/chat_api.cpython-313.pyc differ
 
src/backend/api_routes/__pycache__/chat_history_db_api.cpython-313.pyc CHANGED
Binary files a/src/backend/api_routes/__pycache__/chat_history_db_api.cpython-313.pyc and b/src/backend/api_routes/__pycache__/chat_history_db_api.cpython-313.pyc differ
 
src/backend/api_routes/__pycache__/chat_history_supabase_api.cpython-313.pyc ADDED
Binary file (5.19 kB).
 
src/backend/api_routes/__pycache__/knowledge_base_api.cpython-313.pyc CHANGED
Binary files a/src/backend/api_routes/__pycache__/knowledge_base_api.cpython-313.pyc and b/src/backend/api_routes/__pycache__/knowledge_base_api.cpython-313.pyc differ
 
src/backend/api_routes/chat_api.py CHANGED
@@ -1,6 +1,7 @@
1
- from fastapi import APIRouter, HTTPException, Depends
2
- from fastapi.responses import JSONResponse
3
- from services import llm_model_service, pinecone_service, embedding_service
 
4
  from services.schemas import ConversationInput
5
  from utils import logger
6
 
@@ -8,15 +9,38 @@ logger = logger.get_logger()
8
 
9
  router = APIRouter(prefix="/chat", tags=["Chat"])
10
 
11
- @router.post("/get-health-advice", response_model=dict)
12
- async def get_health_advice(input_data: ConversationInput):
13
  """
14
- Handles requests from the frontend and fetches advice using the `get_health_advice()` function.
15
 
16
- Args:
17
- - input_data (ConversationInput): User's conversation history.
 
 
 
18
 
19
- Example Input:
20
  {
21
  "conversation_history": [
22
  {"role": "user", "content": "I've been feeling tired lately. What should I do?"},
@@ -24,50 +48,56 @@ async def get_health_advice(input_data: ConversationInput):
24
  {"role": "user", "content": "No, I just feel drained even after sleeping well."}
25
  ]
26
  }
 
27
 
28
- Returns:
29
- - dict: Contains 'reply' with the assistant's response.
30
 
31
- Raises:
32
- - HTTPException (400): If conversation history or user query is missing.
33
- - HTTPException (500): If an internal error occurs during response generation.
34
  """
 
35
  if not input_data.conversation_history:
36
  logger.warning("Empty conversation history received.")
37
- raise HTTPException(status_code=400, detail="Conversation history cannot be empty.")
38
-
39
- try:
40
- last_entry = input_data.conversation_history[-1]
41
- if not isinstance(last_entry, dict) or last_entry.get("role") != "user":
42
- logger.warning("Invalid conversation entry format or missing user query.")
43
- raise HTTPException(status_code=400, detail="Invalid conversation entry or missing user query.")
44
-
45
- user_query = last_entry.get("content")
46
- if not user_query:
47
- logger.warning("User query content is missing in the conversation history.")
48
- raise HTTPException(status_code=400, detail="User query content cannot be empty.")
49
 
50
- logger.info(f"Received user query: {user_query}")
51
- query_embeddings = embedding_service.get_text_embedding(user_query)
52
 
53
- db_response = pinecone_service.retrieve_context_from_pinecone(query_embeddings)
54
- logger.info("Fetched DB response successfully.")
55
 
56
- assistant_reply = llm_model_service.get_health_advice(
 
 
 
57
  user_query, db_response, input_data.conversation_history
58
  )
59
-
60
- if not assistant_reply:
61
- logger.warning("Assistant generated an empty response.")
62
- raise HTTPException(status_code=500, detail="Assistant generated an empty response.")
63
-
64
- logger.info("Health advice generated successfully.")
65
- return JSONResponse(content={"reply": assistant_reply}, status_code=200)
66
-
67
- except HTTPException as http_exc:
68
- logger.error(f"HTTPException occurred: {http_exc.detail}")
69
- raise http_exc
70
 
71
  except Exception as e:
72
  logger.error(f"Unexpected error: {e}", exc_info=True)
73
- raise HTTPException(status_code=500, detail="Error generating response. Please try again later.")
1
+ from fastapi import APIRouter, HTTPException, status, Depends
2
+ from services.embedding_service import get_text_embedding
3
+ from services.pinecone_service import retrieve_context_from_pinecone
4
+ from services.llm_model_service import get_health_advice
5
  from services.schemas import ConversationInput
6
  from utils import logger
7
 
 
9
 
10
  router = APIRouter(prefix="/chat", tags=["Chat"])
11
 
12
+ @router.post("/get-health-advice", response_model=dict, status_code=status.HTTP_200_OK)
13
+ async def get_health_advice_endpoint(input_data: ConversationInput):
14
  """
15
+ Provides personalized health advice based on the user's conversation history.
16
 
17
+ ### Overview
18
+ This endpoint is designed to generate meaningful health advice by leveraging
19
+ both the user's most recent query and the conversation history. It ensures
20
+ the LLM model is aware of past interactions to maintain context and provide
21
+ relevant recommendations.
22
 
23
+ ### Process Flow
24
+ 1. **Extract User Query:**
25
+ - Retrieves the most recent entry from the provided conversation history.
26
+ - Ensures the entry is valid and contains a user's question.
27
+
28
+ 2. **Generate Query Embedding:**
29
+ - Uses the `get_text_embedding` service to generate vector embeddings for the extracted query.
30
+
31
+ 3. **Retrieve Contextual Information:**
32
+ - Uses the `retrieve_context_from_pinecone` service to fetch relevant context
33
+ based on the generated embeddings.
34
+
35
+ 4. **Generate Assistant Reply:**
36
+ - Passes the extracted query, retrieved context, and full conversation history to the LLM model.
37
+ - The LLM utilizes this information to provide a context-aware and personalized response.
38
+
39
+ ### Request Body
40
+ - **conversation_history** (List[dict]): List of chat entries representing the conversation flow.
41
+
42
+ **Example Request:**
43
+ ```json
44
  {
45
  "conversation_history": [
46
  {"role": "user", "content": "I've been feeling tired lately. What should I do?"},
 
48
  {"role": "user", "content": "No, I just feel drained even after sleeping well."}
49
  ]
50
  }
51
+ ```
52
 
53
+ ### Response
54
+ - **reply** (str): The assistant's response containing tailored health advice.
55
 
56
+ **Example Response:**
57
+ ```json
58
+ {
59
+ "reply": "You might consider checking your vitamin levels and maintaining a consistent sleep schedule."
60
+ }
61
+ ```
62
+
63
+ ### Error Handling
64
+ - **400 Bad Request:** Raised if the conversation history is empty or the latest user query is missing/invalid.
65
+ - **500 Internal Server Error:** Raised if an unexpected error occurs while generating the response.
66
+
67
+ ### Notes
68
+ - Ensure that the conversation history follows a proper role-based structure (`role: "user"` and `role: "assistant"`).
69
+ - The LLM's response quality heavily depends on the completeness and relevance of the conversation history.
70
+ - The embedding and context retrieval services are essential to enhance the accuracy of the generated advice.
71
  """
72
+
73
  if not input_data.conversation_history:
74
  logger.warning("Empty conversation history received.")
75
+ raise HTTPException(
76
+ status_code=status.HTTP_400_BAD_REQUEST,
77
+ detail="Conversation history cannot be empty."
78
+ )
79
 
80
+ last_entry = input_data.conversation_history[-1]
81
+ user_query = last_entry.get("content")
82
 
83
+ if last_entry.get("role") != "user" or not user_query:
84
+ logger.warning("Invalid or missing user query in conversation history.")
85
+ raise HTTPException(
86
+ status_code=status.HTTP_400_BAD_REQUEST,
87
+ detail="Invalid or missing user query."
88
+ )
89
 
90
+ try:
91
+ query_embeddings = get_text_embedding(user_query)
92
+ db_response = retrieve_context_from_pinecone(query_embeddings)
93
+ assistant_reply = get_health_advice(
94
  user_query, db_response, input_data.conversation_history
95
  )
96
+ return {"reply": assistant_reply}
97
 
98
  except Exception as e:
99
  logger.error(f"Unexpected error: {e}", exc_info=True)
100
+ raise HTTPException(
101
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
102
+ detail="Error generating response. Please try again later."
103
+ )
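
For reference, a minimal client sketch for exercising the reworked endpoint; the base URL and port are assumptions taken from the frontend configuration elsewhere in this commit, not part of the diff itself.

```python
# Hypothetical client call for /chat/get-health-advice (local backend assumed on port 8000).
import requests

payload = {
    "conversation_history": [
        {"role": "user", "content": "I've been feeling tired lately. What should I do?"},
        {"role": "assistant", "content": "Are you sleeping well?"},
        {"role": "user", "content": "No, I just feel drained even after sleeping well."},
    ]
}

response = requests.post(
    "http://localhost:8000/chat/get-health-advice",
    json=payload,
    timeout=30,
)
response.raise_for_status()
print(response.json()["reply"])  # assistant's health advice
```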
src/backend/api_routes/chat_history_db_api.py DELETED
@@ -1,34 +0,0 @@
1
- from fastapi import APIRouter,HTTPException,status,Query
2
- from services.schemas import ChatHistoryRequest
3
- from services import supabase_service
4
- from utils import logger
5
-
6
- logger = logger.get_logger()
7
-
8
- router = APIRouter(prefix='/chat-db',tags=["Chat History Database API's"])
9
-
10
- @router.post('/store-history')
11
- def store_chat_history(chat_history : ChatHistoryRequest):
12
- try:
13
- conversation_id = chat_history.conversation_id
14
- messages = chat_history.messages
15
- return supabase_service.store_chat_history(conversation_id, messages)
16
- except Exception as e:
17
- raise f"Failed to create {e}"
18
-
19
- @router.get('/get-history')
20
- def get_chat_history(conversation_id: str):
21
- """Retrieves chat history from Supabase for a given conversation ID."""
22
- try:
23
- chat_history = supabase_service.get_chat_history(conversation_id)
24
- return chat_history
25
- except Exception as e:
26
- logger.error(f"Error retrieving chat history for ID {conversation_id}: {e}")
27
- raise HTTPException(
28
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
29
- detail="Failed to retrieve chat history. Please try again later."
30
- )
31
-
32
-
33
-
34
-
src/backend/api_routes/chat_history_supabase_api.py ADDED
@@ -0,0 +1,101 @@
1
+ from typing import Dict, Any, Union
2
+ from fastapi import APIRouter, HTTPException, status, Query
3
+ from services.schemas import ChatHistoryRequest
4
+ from services import supabase_service
5
+ from utils import logger
6
+
7
+ logger = logger.get_logger()
8
+
9
+ router = APIRouter(
10
+ prefix='/chat-history',
11
+ tags=["Chat History Management"]
12
+ )
13
+
14
+ @router.post('/store', response_model=Dict[str, Any], status_code=status.HTTP_201_CREATED)
15
+ def add_chat_history(chat_history: ChatHistoryRequest) -> Dict[str, Any]:
16
+ """
17
+ Save chat conversation history in the database.
18
+
19
+ **Request Body:**
20
+ - `conversation_id` (str): Unique identifier for the chat session.
21
+ - `messages` (List[Dict[str, str]]): List of messages exchanged during the session.
22
+
23
+ **Responses:**
24
+ - **201 Created**: Successfully stored the chat history.
25
+ - **400 Bad Request**: Input data validation error.
26
+ - **500 Internal Server Error**: Unexpected error during the saving process.
27
+ """
28
+ try:
29
+ response = supabase_service.store_chat_history(
30
+ chat_history.conversation_id,
31
+ chat_history.messages
32
+ )
33
+ if not response['success']:
34
+ raise HTTPException(
35
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
36
+ detail=response.get("error", "Failed to store chat history.")
37
+ )
38
+ return response
39
+ except ValueError as error:
40
+ logger.error(f"Validation error while storing chat history: {error}")
41
+ raise HTTPException(
42
+ status_code=status.HTTP_400_BAD_REQUEST,
43
+ detail=str(error)
44
+ )
45
+ except Exception as error:
46
+ logger.error(f"Unexpected error while storing chat history: {error}")
47
+ raise HTTPException(
48
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
49
+ detail="Unexpected error occurred. Please try again later."
50
+ )
51
+
52
+ @router.get('/retrieve', response_model=Union[Dict[str, Any], None])
53
+ def get_chat_history(
54
+ conversation_id: str = Query(..., description="Conversation ID for chat history retrieval")
55
+ ) -> Union[Dict[str, Any], None]:
56
+ """
57
+ Retrieve stored chat conversation history using a conversation ID.
58
+
59
+ **Query Parameter:**
60
+ - `conversation_id` (str): Unique identifier for the chat session.
61
+
62
+ **Responses:**
63
+ - **200 OK**: Successfully retrieved the chat history.
64
+ - **404 Not Found**: No chat history found for the provided conversation ID.
65
+ - **500 Internal Server Error**: Unexpected error occurred during retrieval.
66
+ """
67
+ try:
68
+ chat_history = supabase_service.retrieve_chat_history(conversation_id)
69
+ logger.info(f"Retrieved history: {chat_history}")
70
+
71
+ if not chat_history.get('success'):
72
+ error_message = chat_history.get('error', "Unknown error occurred.")
73
+ logger.warning(f"[404] Chat history not found for ID: {conversation_id} - {error_message}")
74
+ raise HTTPException(
75
+ status_code=status.HTTP_404_NOT_FOUND,
76
+ detail=f"Chat history not found for ID: {conversation_id}"
77
+ )
78
+
79
+ logger.info(f"Chat history retrieved successfully for ID: {conversation_id}")
80
+ return chat_history
81
+
82
+ except KeyError as key_error:
83
+ logger.error(f"[500] Missing key in response data for ID {conversation_id}: {key_error}")
84
+ raise HTTPException(
85
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
86
+ detail="Internal data structure error. Please contact support."
87
+ )
88
+
89
+ except ConnectionError:
90
+ logger.error(f"[500] Database connection error while retrieving ID {conversation_id}")
91
+ raise HTTPException(
92
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
93
+ detail="Failed to connect to the database. Please try again later."
94
+ )
95
+
96
+ except Exception as error:
97
+ logger.error(f"[500] Unexpected error while retrieving chat history for ID {conversation_id}: {error}")
98
+ raise HTTPException(
99
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
100
+ detail="Unexpected error occurred while retrieving chat history. Please try again later."
101
+ )
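
For reference, a hedged client-side sketch of the new store/retrieve routes added in this file; the base URL is an assumption matching the local setup used by the frontend in this commit.

```python
# Hypothetical client calls for the /chat-history routes.
import requests

BASE_URL = "http://localhost:8000/chat-history"  # assumed local deployment

# Store a conversation (expects 201 Created on success).
store_resp = requests.post(
    f"{BASE_URL}/store",
    json={
        "conversation_id": "conv-123",
        "messages": [
            {"role": "user", "content": "Hello"},
            {"role": "assistant", "content": "Hi! How can I help?"},
        ],
    },
    timeout=30,
)
print(store_resp.status_code, store_resp.json())

# Retrieve it back (404 if the conversation ID is unknown).
get_resp = requests.get(
    f"{BASE_URL}/retrieve",
    params={"conversation_id": "conv-123"},
    timeout=30,
)
print(get_resp.status_code, get_resp.json())
```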
src/backend/api_routes/knowledge_base_api.py CHANGED
@@ -1,58 +1,127 @@
1
- from fastapi import APIRouter,HTTPException
2
- from services import pinecone_service,embedding_service
3
- from services.schemas import UpsertRequest,DeleteRequest,MetadataRequest
4
  import pandas as pd
5
  from utils import logger
 
6
 
7
  logger = logger.get_logger()
8
 
9
  router = APIRouter(prefix="/knowledge-base", tags=['Knowledge Base Operations'])
10
 
11
- @router.post("/upsert-data")
12
  def upsert_data(request: UpsertRequest):
13
 
14
  """
15
- Example Input :
16
- {
17
- "data": [{"input": "What is mental health?", "output": "Mental health refers to...", "instruction": "Focus on general well-being."}]
18
- }
19
20
  """
21
  try:
 
 
22
  df = pd.DataFrame(request.data)
 
 
23
  pinecone_service.upsert_vector_data(df)
24
- return {"message": "Data upserted successfully."}
 
 
 
 
25
  except Exception as e:
26
- raise HTTPException(status_code=500, detail=f"Failed to upsert data: {e}")
 
27
 
28
- @router.post("/delete-records")
29
  def delete_records(request: DeleteRequest):
30
  """
31
- Example Input :
 
 
 
32
  {"ids_to_delete": ["id_123", "id_456"]}
 
33
 
 
 
 
 
34
  """
35
  try:
 
 
 
36
  pinecone_service.delete_records_by_ids(request.ids_to_delete)
37
- return {"message": "Records deleted successfully."}
38
  except Exception as e:
39
- raise HTTPException(status_code=500, detail=f"Failed to delete records: {e}")
 
40
 
41
- @router.post("/fetch-metadata")
42
  def fetch_metadata(request: MetadataRequest):
43
 
44
  """
45
- Example Input :
46
- {"prompt": "Tell me about mental health",
47
  "n_result": 3,
48
- "score_threshold": 0.47}
49
  """
50
  try:
51
- prompt = request.prompt
52
- logger.info(f"Given prompt : {prompt}")
53
- # prompt = prompt[-1] if isinstance(prompt, list) else prompt
54
- embedding = embedding_service.get_text_embedding(prompt)
55
- metadata = pinecone_service.retrieve_relevant_metadata(embedding, prompt, request.n_result, request.score_threshold)
56
- return {"metadata": metadata}
57
  except Exception as e:
58
- raise HTTPException(status_code=500, detail=f"Failed to fetch metadata: {e}")
 
 
1
+ from fastapi import APIRouter, HTTPException
2
+ from services import pinecone_service, embedding_service
3
+ from services.schemas import UpsertRequest, DeleteRequest, MetadataRequest
4
  import pandas as pd
5
  from utils import logger
6
+ from fastapi.responses import JSONResponse
7
 
8
  logger = logger.get_logger()
9
 
10
  router = APIRouter(prefix="/knowledge-base", tags=['Knowledge Base Operations'])
11
 
12
+ @router.post("/upsert-data", response_model=dict, status_code=200)
13
  def upsert_data(request: UpsertRequest):
14
 
15
  """
16
+ Upserts data into the knowledge base.
 
 
 
17
 
18
+ ### Example Input:
19
+ ```json
20
+ {
21
+ "data": [
22
+ {
23
+ "input": "What is mental health?",
24
+ "output": "Mental health refers to...",
25
+ "instruction": "Focus on general well-being."
26
+ }
27
+ ]
28
+ }
29
+ ```
30
+
31
+ ### Response:
32
+ - **200:** Data upserted successfully.
33
+ - **500:** Internal server error.
34
  """
35
  try:
36
+ if not request.data:
37
+ raise HTTPException(status_code=400, detail="Data cannot be empty.")
38
  df = pd.DataFrame(request.data)
39
+ if df.empty:
40
+ raise HTTPException(status_code=400, detail="No valid data provided for upsert.")
41
  pinecone_service.upsert_vector_data(df)
42
+ return JSONResponse(content={"message": "Data upserted successfully."}, status_code=200)
43
+ except (ValueError, KeyError) as e:
44
+ logger.error(f"Invalid data format: {e}")
45
+ raise HTTPException(status_code=400, detail=f"Invalid data format: {e}")
46
+
47
  except Exception as e:
48
+ logger.error(f"Unexpected error during data upsert: {e}")
49
+ raise HTTPException(status_code=500, detail="Failed to upsert data due to an unexpected error.")
50
 
51
+ @router.post("/delete-records", response_model=dict, status_code=200)
52
  def delete_records(request: DeleteRequest):
53
  """
54
+ Deletes records from the knowledge base.
55
+
56
+ ### Example Input:
57
+ ```json
58
  {"ids_to_delete": ["id_123", "id_456"]}
59
+ ```
60
 
61
+ ### Response:
62
+ - **200:** Records deleted successfully.
63
+ - **400:** No valid IDs provided.
64
+ - **500:** Internal server error.
65
  """
66
  try:
67
+ if not request.ids_to_delete:
68
+ raise HTTPException(status_code=400, detail="IDs to delete cannot be empty.")
69
+
70
  pinecone_service.delete_records_by_ids(request.ids_to_delete)
71
+ logger.info(f"Successfully deleted records: {request.ids_to_delete}")
72
+ return JSONResponse(content={"message": "Records deleted successfully."}, status_code=200)
73
+
74
+ except (ValueError, KeyError) as e:
75
+ logger.error(f"Invalid data format for deletion: {e}")
76
+ raise HTTPException(status_code=400, detail=f"Invalid data format: {e}")
77
+
78
  except Exception as e:
79
+ logger.error(f"Unexpected error while deleting records: {e}")
80
+ raise HTTPException(status_code=500, detail="Failed to delete records due to an unexpected error.")
81
 
82
+ @router.post("/fetch-metadata", response_model=dict, status_code=200)
83
  def fetch_metadata(request: MetadataRequest):
84
 
85
  """
86
+ Retrieves relevant metadata for a given prompt.
87
+
88
+ ### Example Input:
89
+ ```json
90
+ {
91
+ "prompt": "Tell me about mental health",
92
  "n_result": 3,
93
+ "score_threshold": 0.47
94
+ }
95
+ ```
96
+
97
+ ### Response:
98
+ - **200:** Metadata retrieved successfully.
99
+ - **400:** Invalid prompt or input data.
100
+ - **500:** Internal server error.
101
  """
102
  try:
103
+ if not request.prompt.strip():
104
+ raise HTTPException(status_code=400, detail="Prompt cannot be empty.")
105
+ logger.info(f"Fetching metadata for prompt: {request.prompt}")
106
+ embedding = embedding_service.get_text_embedding(request.prompt)
107
+ if not embedding:
108
+ raise HTTPException(status_code=400, detail="Failed to generate embedding for the given prompt.")
109
+ metadata = pinecone_service.retrieve_relevant_metadata(
110
+ embedding,
111
+ request.prompt,
112
+ request.n_result,
113
+ request.score_threshold
114
+ )
115
+ if not metadata:
116
+ raise HTTPException(status_code=404, detail="No relevant metadata found.")
117
+
118
+ logger.info(f"Successfully fetched metadata for prompt: {request.prompt}")
119
+ return JSONResponse(content={"metadata": metadata}, status_code=200)
120
+
121
+ except (ValueError, KeyError) as e:
122
+ logger.error(f"Invalid data format for metadata fetch: {e}")
123
+ raise HTTPException(status_code=400, detail=f"Invalid data format: {e}")
124
+
125
  except Exception as e:
126
+ logger.error(f"Unexpected error while fetching metadata: {e}")
127
+ raise HTTPException(status_code=500, detail="Failed to fetch metadata due to an unexpected error.")
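
A hedged sketch of how these three routes might be exercised from a client; the payload fields mirror the example inputs in the docstrings above, while the base URL is an assumption.

```python
# Hypothetical client calls for the knowledge-base routes.
import requests

BASE_URL = "http://localhost:8000/knowledge-base"  # assumed local deployment

# Upsert one Q&A record.
requests.post(
    f"{BASE_URL}/upsert-data",
    json={"data": [{
        "input": "What is mental health?",
        "output": "Mental health refers to...",
        "instruction": "Focus on general well-being.",
    }]},
    timeout=60,
).raise_for_status()

# Fetch metadata relevant to a prompt.
meta = requests.post(
    f"{BASE_URL}/fetch-metadata",
    json={"prompt": "Tell me about mental health", "n_result": 3, "score_threshold": 0.47},
    timeout=60,
)
print(meta.json().get("metadata"))

# Delete records by ID.
requests.post(
    f"{BASE_URL}/delete-records",
    json={"ids_to_delete": ["id_123", "id_456"]},
    timeout=60,
).raise_for_status()
```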
src/backend/main.py CHANGED
@@ -3,7 +3,7 @@ from fastapi.middleware.cors import CORSMiddleware
3
  from datetime import datetime
4
  from api_routes.chat_api import router as chat_router
5
  from api_routes.knowledge_base_api import router as knowledge_base_router
6
- from api_routes.chat_history_db_api import router as chat_history_router
7
 
8
  description = (
9
  "Yuvabe Care Companion AI is designed to provide helpful and accurate "
 
3
  from datetime import datetime
4
  from api_routes.chat_api import router as chat_router
5
  from api_routes.knowledge_base_api import router as knowledge_base_router
6
+ from api_routes.chat_history_supabase_api import router as chat_history_router
7
 
8
  description = (
9
  "Yuvabe Care Companion AI is designed to provide helpful and accurate "
src/backend/services/__pycache__/embedding_service.cpython-313.pyc CHANGED
Binary files a/src/backend/services/__pycache__/embedding_service.cpython-313.pyc and b/src/backend/services/__pycache__/embedding_service.cpython-313.pyc differ
 
src/backend/services/__pycache__/llm_model_service.cpython-313.pyc CHANGED
Binary files a/src/backend/services/__pycache__/llm_model_service.cpython-313.pyc and b/src/backend/services/__pycache__/llm_model_service.cpython-313.pyc differ
 
src/backend/services/__pycache__/pinecone_service.cpython-313.pyc CHANGED
Binary files a/src/backend/services/__pycache__/pinecone_service.cpython-313.pyc and b/src/backend/services/__pycache__/pinecone_service.cpython-313.pyc differ
 
src/backend/services/__pycache__/schemas.cpython-313.pyc CHANGED
Binary files a/src/backend/services/__pycache__/schemas.cpython-313.pyc and b/src/backend/services/__pycache__/schemas.cpython-313.pyc differ
 
src/backend/services/__pycache__/supabase_service.cpython-313.pyc CHANGED
Binary files a/src/backend/services/__pycache__/supabase_service.cpython-313.pyc and b/src/backend/services/__pycache__/supabase_service.cpython-313.pyc differ
 
src/backend/services/llm_model_service.py CHANGED
@@ -73,15 +73,17 @@ def build_prompt(
73
  """
74
  conversation_history = truncate_conversation_history(conversation_history)
75
 
76
- if db_response and "No relevant information found" not in db_response:
77
  return SYSTEM_PROMPT + conversation_history + [
78
- {"role":"system", "content":f"{db_response}Please provide a detailed response based on the above information.If its relevant to the user query"},
 
 
79
  {"role": "user", "content": user_query}
80
  ]
81
 
82
  backup_response = (
83
- "I'm unable to find relevant data from the database. "
84
- "Please respond based on your expertise and available information."
85
  )
86
  return SYSTEM_PROMPT + conversation_history + [
87
  {"role": "system", "content": backup_response},
@@ -116,7 +118,6 @@ def get_health_advice(
116
  )
117
 
118
  assistant_reply = response.choices[0].message.content.strip()
119
- logger.info(f"Generated response: {assistant_reply}")
120
  return assistant_reply
121
 
122
  except (ConnectionError, TimeoutError) as e:
 
73
  """
74
  conversation_history = truncate_conversation_history(conversation_history)
75
 
76
+ if db_response and db_response.strip() and "No relevant information found" not in db_response:
77
  return SYSTEM_PROMPT + conversation_history + [
78
+ {"role": "system", "content": (f"Here is some context from the database: {db_response}. "
79
+ "If this information is relevant to the user's query, please use it to form your response. "
80
+ "Otherwise, rely on your own knowledge and expertise.")},
81
  {"role": "user", "content": user_query}
82
  ]
83
 
84
  backup_response = (
85
+ "I couldn't find specific data from the database. "
86
+ "Please provide a detailed response based on your expertise and available information."
87
  )
88
  return SYSTEM_PROMPT + conversation_history + [
89
  {"role": "system", "content": backup_response},
 
118
  )
119
 
120
  assistant_reply = response.choices[0].message.content.strip()
 
121
  return assistant_reply
122
 
123
  except (ConnectionError, TimeoutError) as e:
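
To illustrate the prompt-assembly change, here is a self-contained sketch of the two branches; `SYSTEM_PROMPT` and the truncation step are simplified stand-ins for the real module-level values, not the actual implementation.

```python
# Simplified stand-in for build_prompt, showing the message layout after this change.
SYSTEM_PROMPT = [{"role": "system", "content": "You are a helpful health assistant."}]  # placeholder

def build_prompt_sketch(user_query: str, db_response: str, conversation_history: list) -> list:
    if db_response and db_response.strip() and "No relevant information found" not in db_response:
        context_msg = (
            f"Here is some context from the database: {db_response}. "
            "If this information is relevant to the user's query, please use it to form your response. "
            "Otherwise, rely on your own knowledge and expertise."
        )
        return SYSTEM_PROMPT + conversation_history + [
            {"role": "system", "content": context_msg},
            {"role": "user", "content": user_query},
        ]
    backup = (
        "I couldn't find specific data from the database. "
        "Please provide a detailed response based on your expertise and available information."
    )
    return SYSTEM_PROMPT + conversation_history + [
        {"role": "system", "content": backup},
        {"role": "user", "content": user_query},
    ]

messages = build_prompt_sketch("I feel tired all the time.", "Fatigue can be linked to low iron.", [])
print([m["role"] for m in messages])  # ['system', 'system', 'user']
```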
src/backend/services/pinecone_service.py CHANGED
@@ -92,7 +92,7 @@ def initialize_pinecone_index(pinecone, index_name, dimension=384, metric="cosin
92
  except Exception as e:
93
  logger.error(f"Error occurred while getting or creating the Pinecone index: {str(e)}", exc_info=True)
94
  return None
95
-
96
  index = initialize_pinecone_index(PINECONE, INDEX_NAME)
97
 
98
  def delete_records_by_ids(ids_to_delete):
 
92
  except Exception as e:
93
  logger.error(f"Error occurred while getting or creating the Pinecone index: {str(e)}", exc_info=True)
94
  return None
95
+
96
  index = initialize_pinecone_index(PINECONE, INDEX_NAME)
97
 
98
  def delete_records_by_ids(ids_to_delete):
src/backend/services/schemas.py CHANGED
@@ -1,5 +1,5 @@
1
  from pydantic import BaseModel
2
- from typing import List,Dict, Optional
3
 
4
  class ConversationInput(BaseModel):
5
  conversation_history: list[dict]
@@ -8,18 +8,8 @@ class ChatHistoryRequest(BaseModel):
8
  conversation_id: str
9
  messages: List[dict]
10
 
11
-
12
-
13
-
14
-
15
-
16
-
17
- class Chat_Response(BaseModel):
18
- prompt: Optional[List] = None
19
- response: Optional[Dict] = None
20
-
21
  class UpsertRequest(BaseModel):
22
- data: list # Expecting a list of JSON objects (rows of data)
23
 
24
  class DeleteRequest(BaseModel):
25
  ids_to_delete: list
@@ -27,13 +17,4 @@ class DeleteRequest(BaseModel):
27
  class MetadataRequest(BaseModel):
28
  prompt: str
29
  n_result: int = 3
30
- score_threshold: float = 0.45
31
-
32
- class ChatRequest(BaseModel):
33
- query: str
34
-
35
- class ChatResponse(BaseModel):
36
- response: str
37
-
38
- class ChatHistoryResponse(BaseModel):
39
- date: str
 
1
  from pydantic import BaseModel
2
+ from typing import List
3
 
4
  class ConversationInput(BaseModel):
5
  conversation_history: list[dict]
 
8
  conversation_id: str
9
  messages: List[dict]
10
 
11
  class UpsertRequest(BaseModel):
12
+ data: list
13
 
14
  class DeleteRequest(BaseModel):
15
  ids_to_delete: list
 
17
  class MetadataRequest(BaseModel):
18
  prompt: str
19
  n_result: int = 3
20
+ score_threshold: float = 0.45
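
A quick, hypothetical check of how the trimmed `MetadataRequest` model applies its defaults and validation; only the fields shown in the diff are used.

```python
# Hypothetical validation check for MetadataRequest (fields match the diff above).
from pydantic import BaseModel, ValidationError

class MetadataRequest(BaseModel):
    prompt: str
    n_result: int = 3
    score_threshold: float = 0.45

print(MetadataRequest(prompt="Tell me about mental health"))  # defaults fill n_result/score_threshold

try:
    MetadataRequest(n_result=5)  # missing required prompt
except ValidationError as err:
    print(err.errors()[0]["loc"])  # ('prompt',)
```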
src/backend/services/supabase_service.py CHANGED
@@ -1,76 +1,140 @@
1
  import json
2
  import os
3
- import sys
4
- src_directory = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..", "backend"))
5
- sys.path.append(src_directory)
6
- from supabase import create_client
7
  from datetime import datetime
 
8
  from utils import logger
 
9
 
10
  SUPABASE_URL = os.getenv('SUPABASE_URL')
11
- SUPABASE_KEY = os.getenv('SUPABASE_KEY')
12
  SUPABASE_BUCKET = os.getenv('SUPABASE_BUCKET')
13
- LLM_MODEL_NAME= os.getenv('LLM_MODEL_NAME')
14
-
15
- logger = logger.get_logger()
16
 
 
17
  supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
18
 
19
- def store_chat_history(conversation_id, new_messages):
20
  try:
21
- file_path = f"chat-history/{conversation_id}.json"
22
  metadata = {
23
  "timestamp": datetime.now().isoformat(),
24
  "language": "en",
25
  "model": LLM_MODEL_NAME
26
  }
27
 
 
28
  try:
29
  existing_data = supabase.storage.from_(SUPABASE_BUCKET).download(file_path)
30
- chat_data = json.loads(existing_data.decode('utf-8'))
 
 
31
  chat_data['messages'].extend(new_messages)
32
- logger.info("Added the messages to the existing file")
33
- except Exception:
 
34
  chat_data = {
35
  "conversation_id": conversation_id,
36
  "messages": new_messages,
37
  "metadata": metadata
38
  }
39
- logger.info("Created a new chat history file.")
40
-
41
- updated_json_data = json.dumps(chat_data, indent=4)
42
 
 
43
  supabase.storage.from_(SUPABASE_BUCKET).upload(
44
  file_path, updated_json_data.encode('utf-8'),
45
  file_options={"content-type": "application/json", "upsert": "true"}
46
  )
47
- logger.info("Chat history stored successfully!")
48
 
49
- return {"message": "Chat history stored successfully!"}
50
 
 
 
 
51
  except Exception as e:
52
- logger.error(f"Error: {e}")
53
- return {"error": str(e)}
54
 
55
- def get_chat_history(conversation_id):
56
  try:
57
- file_path = f"chat-history/{conversation_id}.json"
58
  existing_data = supabase.storage.from_(SUPABASE_BUCKET).download(file_path)
59
-
60
- if existing_data:
61
- chat_data = json.loads(existing_data.decode('utf-8'))
62
- return chat_data
63
- else:
64
- logger.warning("No chat history found for the given conversation ID.")
65
- return {"message": "No chat history found."}
66
 
67
- except Exception as e:
68
- logger.error(f"Error retrieving chat history: {e}")
69
- return {"error": str(e)}
70
 
71
- def create_bucket_with_file(bucket_name:str):
72
- try:
73
- supabase.storage.create_bucket(bucket_name)
74
- logger.info(f"Bucket '{bucket_name}' created successfully.")
 
75
  except Exception as e:
76
- logger.error(f"Error creating bucket: {e}")
 
 
1
  import json
2
  import os
3
  from datetime import datetime
4
+ from supabase import create_client, StorageException
5
  from utils import logger
6
+ from dotenv import load_dotenv
7
 
8
+ # Logger Initialization
9
+ logger = logger.get_logger()
10
+
11
+ # Load Environment Variables
12
+ load_dotenv()
13
  SUPABASE_URL = os.getenv('SUPABASE_URL')
14
+ SUPABASE_KEY = os.getenv('SUPABASE_KEY')
15
  SUPABASE_BUCKET = os.getenv('SUPABASE_BUCKET')
16
+ LLM_MODEL_NAME = os.getenv('LLM_MODEL_NAME')
 
 
17
 
18
+ # Supabase Client Initialization
19
  supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
20
 
21
+ # File Path Generator
22
+ def _get_file_path(conversation_id: str) -> str:
23
+ """
24
+ Generates the file path for storing chat history JSON files.
25
+
26
+ Args:
27
+ conversation_id (str): Unique identifier for the conversation.
28
+
29
+ Returns:
30
+ str: Path to the chat history JSON file.
31
+ """
32
+ return f"chat-history/{conversation_id}.json"
33
+
34
+ # JSON Loader with Safe Handling
35
+ def _load_json(data: bytes) -> dict:
36
+ """
37
+ Safely loads JSON data from a byte stream.
38
+
39
+ Args:
40
+ data (bytes): The byte stream to decode and parse as JSON.
41
+
42
+ Returns:
43
+ dict: Parsed JSON data or an empty dictionary on failure.
44
+ """
45
+ try:
46
+ return json.loads(data.decode('utf-8'))
47
+ except (json.JSONDecodeError, AttributeError):
48
+ logger.error("Failed to decode JSON data.")
49
+ return {}
50
+
51
+ # JSON Dumper with Indentation
52
+ def _dump_json(data: dict) -> str:
53
+ """
54
+ Formats data as a JSON string with indentation for better readability.
55
+
56
+ Args:
57
+ data (dict): The data to format.
58
+
59
+ Returns:
60
+ str: Formatted JSON string.
61
+ """
62
+ return json.dumps(data, indent=4)
63
+
64
+ def store_chat_history(conversation_id: str, new_messages: list) -> dict:
65
+ """
66
+ Stores or updates chat history in Supabase storage. If the file exists,
67
+ appends new messages; otherwise, creates a new file.
68
+
69
+ Args:
70
+ conversation_id (str): Unique identifier for the conversation.
71
+ new_messages (list): List of chat messages to store.
72
+
73
+ Returns:
74
+ dict: Operation success status and related message.
75
+ """
76
  try:
77
+ file_path = _get_file_path(conversation_id)
78
  metadata = {
79
  "timestamp": datetime.now().isoformat(),
80
  "language": "en",
81
  "model": LLM_MODEL_NAME
82
  }
83
 
84
+ # Load Existing Data
85
  try:
86
  existing_data = supabase.storage.from_(SUPABASE_BUCKET).download(file_path)
87
+ chat_data = _load_json(existing_data)
88
+ if 'messages' not in chat_data:
89
+ chat_data['messages'] = []
90
  chat_data['messages'].extend(new_messages)
91
+ logger.info(f"Messages appended to existing file for conversation ID: {conversation_id}")
92
+ except StorageException as e:
93
+ logger.warning(f"No existing file found. Creating new one for ID: {conversation_id}")
94
  chat_data = {
95
  "conversation_id": conversation_id,
96
  "messages": new_messages,
97
  "metadata": metadata
98
  }
 
 
 
99
 
100
+ updated_json_data = _dump_json(chat_data)
101
  supabase.storage.from_(SUPABASE_BUCKET).upload(
102
  file_path, updated_json_data.encode('utf-8'),
103
  file_options={"content-type": "application/json", "upsert": "true"}
104
  )
 
105
 
106
+ return {"success": True, "message": "Chat history stored successfully."}
107
 
108
+ except StorageException as e:
109
+ logger.error(f"Supabase Storage error: {e}")
110
+ return {"success": False, "error": "Failed to store chat history. Storage error occurred."}
111
  except Exception as e:
112
+ logger.error(f"Unexpected error while storing chat history: {e}")
113
+ return {"success": False, "error": "Unexpected error occurred while storing chat history."}
114
 
115
+ def retrieve_chat_history(conversation_id: str) -> dict:
116
+ """
117
+ Retrieves chat history from Supabase storage based on the given conversation ID.
118
+
119
+ Args:
120
+ conversation_id (str): Unique identifier for the conversation.
121
+
122
+ Returns:
123
+ dict: Retrieved chat data or error message on failure.
124
+ """
125
  try:
126
+ file_path = _get_file_path(conversation_id)
127
  existing_data = supabase.storage.from_(SUPABASE_BUCKET).download(file_path)
 
128
 
129
+ if not existing_data:
130
+ logger.warning(f"No chat history found for ID: {conversation_id}")
131
+ return {"success": False, "message": "No chat history found."}
132
 
133
+ return {"success": True, "data": _load_json(existing_data)}
134
+
135
+ except StorageException as e:
136
+ logger.error(f"Supabase Storage error while retrieving chat history: {e}")
137
+ return {"success": False, "error": "Failed to retrieve chat history. Storage error occurred."}
138
  except Exception as e:
139
+ logger.error(f"Unexpected error retrieving chat history for ID {conversation_id}: {e}")
140
+ return {"success": False, "error": "Unexpected error occurred while retrieving chat history."}
src/frontend/app/__pycache__/common_functions.cpython-313.pyc CHANGED
Binary files a/src/frontend/app/__pycache__/common_functions.cpython-313.pyc and b/src/frontend/app/__pycache__/common_functions.cpython-313.pyc differ
 
src/frontend/app/common_functions.py CHANGED
@@ -37,7 +37,7 @@ def config_homepage(page_title=PAGE_TITLE):
37
  def set_page_title(page_title=PAGE_TITLE):
38
  st.markdown(f"""
39
  <h1 style="color: white; text-align: left; font-size: 42px;">
40
- <i>{PAGE_TITLE} 🏥⚕️🤖</i>
41
  </h1>
42
  """, unsafe_allow_html=True
43
  )
@@ -141,7 +141,7 @@ def fetch_response(prompt, chat_history):
141
 
142
  def store_chat_history_in_db(conversation_id, messages):
143
  try:
144
- API_URL = f"http://localhost:8000/chat-db/store-history"
145
  payload = {"conversation_id": conversation_id, 'messages': messages}
146
  response = requests.post(API_URL, json=payload)
147
  logger.info("Successfully added the chat in db")
@@ -149,7 +149,7 @@ def store_chat_history_in_db(conversation_id, messages):
149
  logger.info(f"Failed to add the chat in db {e}")
150
 
151
  def get_chat_history_from_db(conversation_id: str, retries=3, delay=5):
152
- API_URL = "http://127.0.0.1:8000/chat-db/get-history"
153
  for attempt in range(retries):
154
  try:
155
  response = requests.get(API_URL, params={"conversation_id": conversation_id}, timeout=30)
@@ -171,24 +171,35 @@ def display_chat_history(conversation_id):
171
  with st.spinner("Fetching chat history..."):
172
  chat_history = get_chat_history_from_db(conversation_id)
173
 
174
- if not chat_history or "messages" not in chat_history or not chat_history["messages"]:
175
  st.error("No chat history found for this conversation.")
176
  return
177
-
178
- first_message_content = chat_history["messages"][0].get('content', '').strip()
179
  button_text = first_message_content[:20] if first_message_content else "No Content"
180
 
181
  if st.sidebar.button(f"Show History for {button_text}", key=f"show_history_{conversation_id}"):
182
  st.subheader(f"Chat History for Conversation ID: {conversation_id}")
183
- for message in chat_history["messages"]:
184
- if message['role'] == 'user':
185
- with st.chat_message('user'):
186
- st.write(message['role'])
187
- st.write(message['content'])
188
- elif message['role'] == 'assistant':
189
- with st.chat_message('assistant'):
190
- st.write(message['role'])
191
- st.write(message['content'])
192
 
193
  except Exception as e:
194
  logger.error(f"Error retrieving chat history for {conversation_id}: {e}")
 
37
  def set_page_title(page_title=PAGE_TITLE):
38
  st.markdown(f"""
39
  <h1 style="color: white; text-align: left; font-size: 42px;">
40
+ <i>{PAGE_TITLE} ⚕️</i>
41
  </h1>
42
  """, unsafe_allow_html=True
43
  )
 
141
 
142
  def store_chat_history_in_db(conversation_id, messages):
143
  try:
144
+ API_URL = f"http://localhost:8000/chat-history/store"
145
  payload = {"conversation_id": conversation_id, 'messages': messages}
146
  response = requests.post(API_URL, json=payload)
147
  logger.info("Successfully added the chat in db")
 
149
  logger.info(f"Failed to add the chat in db {e}")
150
 
151
  def get_chat_history_from_db(conversation_id: str, retries=3, delay=5):
152
+ API_URL = "http://127.0.0.1:8000/chat-history/retrieve"
153
  for attempt in range(retries):
154
  try:
155
  response = requests.get(API_URL, params={"conversation_id": conversation_id}, timeout=30)
 
171
  with st.spinner("Fetching chat history..."):
172
  chat_history = get_chat_history_from_db(conversation_id)
173
 
174
+ if not chat_history or "data" not in chat_history or not chat_history["data"].get("messages"):
175
  st.error("No chat history found for this conversation.")
176
  return
177
+
178
+ first_message_content = chat_history["data"]["messages"][0].get('content', '').strip()
179
  button_text = first_message_content[:20] if first_message_content else "No Content"
180
 
181
  if st.sidebar.button(f"Show History for {button_text}", key=f"show_history_{conversation_id}"):
182
  st.subheader(f"Chat History for Conversation ID: {conversation_id}")
183
+
184
+ for message in chat_history["data"]["messages"]:
185
+ role = message.get('role', '').capitalize()
186
+ content = message.get('content', '').strip()
187
+
188
+ if role == 'User':
189
+ st.markdown(f"**{role}:** {content}")
190
+ elif role == 'Assistant':
191
+ st.markdown(f"""
192
+ <div style="
193
+ background-color: #f0f2f6;
194
+ padding: 15px;
195
+ border-left: 5px solid #4CAF50;
196
+ border-radius: 8px;
197
+ margin-bottom: 10px;
198
+ box-shadow: 2px 2px 8px rgba(0, 0, 0, 0.1);">
199
+ <strong style="color: #333; font-size: 16px;">{role}:</strong>
200
+ <div style="margin-top: 5px; color: #555; font-size: 14px;">{content}</div>
201
+ </div>
202
+ """, unsafe_allow_html=True)
203
 
204
  except Exception as e:
205
  logger.error(f"Error retrieving chat history for {conversation_id}: {e}")
src/frontend/pages/chatbot.py CHANGED
@@ -7,6 +7,7 @@ API_URL = "http://localhost:8000/chat/get-health-advice/"
7
  NUMBER_OF_MESSAGES_TO_DISPLAY = 20
8
  common_functions.config_homepage()
9
  common_functions.set_page_title()
 
10
  # Initialize conversation history
11
  def initialize_conversation():
12
  assistant_message = ("Hello! I am your Yuvabe Care Companion AI, here to assist you with general medicine queries. "
 
7
  NUMBER_OF_MESSAGES_TO_DISPLAY = 20
8
  common_functions.config_homepage()
9
  common_functions.set_page_title()
10
+ common_functions.set_bg_image("src/frontend/images/health_care_baner_3.gif")
11
  # Initialize conversation history
12
  def initialize_conversation():
13
  assistant_message = ("Hello! I am your Yuvabe Care Companion AI, here to assist you with general medicine queries. "