"""
RAG Integration Example

This file demonstrates how to integrate the RAG system with your existing
app.py without modifying the original code. It is a standalone example that
you can use as a reference when you're ready to integrate RAG functionality.

Usage:
    1. Run train_rag.py to build your knowledge base.
    2. Use this integration example as a template for modifying your app.py.
"""

import logging
import os

import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI

from rag_utils import RAGSystem

# Configure logging so index loading and retrieval errors are visible.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)

# Load environment variables (e.g. GOOGLE_API_KEY) from a .env file.
load_dotenv(override=True)

# A single module-level RAG system instance, shared by the chat class below.
rag = RAGSystem()

# Try to load a prebuilt index. Track availability on rag.index_loaded so
# system_prompt() can fall back gracefully when retrieval is unavailable.
# Note: index_loaded is set True only after a successful load, so a missing
# index is never mistaken for a loaded one.
try:
    rag_index_path = "me/rag_index"
    if os.path.exists(rag_index_path):
        logger.info(f"Loading RAG index from {rag_index_path}")
        rag.load_index(rag_index_path)
        logger.info(f"Loaded {len(rag.chunks)} chunks from index")
        rag.index_loaded = True
    else:
        logger.warning(f"RAG index not found at {rag_index_path}. Run train_rag.py first.")
        rag.index_loaded = False
except Exception as e:
    logger.error(f"Error loading RAG index: {e}")
    rag.index_loaded = False
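
# Optional sanity check (a minimal sketch; the query string is illustrative).
# Uncomment to confirm retrieval works before wiring it into the chat prompt:
#
#   if getattr(rag, "index_loaded", False):
#       logger.info("Sample context: %s", rag.get_context_for_query("career background", top_k=1))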


class MeWithRAG:
    """Example class showing how to integrate RAG with your existing Me class."""

    def __init__(self):
        # Gemini is accessed through its OpenAI-compatible endpoint, so the
        # standard OpenAI client works with a Google API key.
        self.openai = OpenAI(
            api_key=os.getenv("GOOGLE_API_KEY"),
            base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
        )
        self.name = "Sagarnil Das"

        # Fallback introduction used when no RAG context is available.
        self.intro = "I'm a software engineer and data scientist with expertise in AI and machine learning."
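
        # Hypothetical hardening (not in the original flow): fail fast when
        # the API key is missing, e.g.
        #   if not os.getenv("GOOGLE_API_KEY"):
        #       raise RuntimeError("GOOGLE_API_KEY is not set")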

    def system_prompt(self, query=None):
        """Build the system prompt, adding RAG context when a query is given."""
        system_prompt = f"You are acting as {self.name}. You are answering questions on {self.name}'s website, \
particularly questions related to {self.name}'s career, background, skills and experience. \
Your responsibility is to represent {self.name} for interactions on the website as faithfully as possible."

        if query and getattr(rag, "index_loaded", False):
            try:
                # Retrieve the most relevant chunks for this query.
                context = rag.get_context_for_query(query, top_k=3)
                system_prompt += f"\n\n## Relevant Background Information:\n{context}\n\n"
            except Exception as e:
                logger.error(f"Error retrieving context: {e}")
                system_prompt += f"\n\n## Brief Introduction:\n{self.intro}\n\n"
        else:
            # No query, or no index loaded: use the static introduction.
            system_prompt += f"\n\n## Brief Introduction:\n{self.intro}\n\n"

        system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return system_prompt
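
    # The assembled prompt has roughly this shape (illustrative, not verbatim):
    #
    #   You are acting as Sagarnil Das. You are answering questions on ...
    #
    #   ## Relevant Background Information:    (or ## Brief Introduction:)
    #   <retrieved chunks, or self.intro>
    #
    #   With this context, please chat with the user, always staying in
    #   character as Sagarnil Das.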

    def chat(self, message, history):
        """Answer a user message, retrieving RAG context for the current query."""
        try:
            # Build the system prompt with context retrieved for this message;
            # fall back to the context-free prompt if retrieval fails.
            try:
                system_content = self.system_prompt(query=message)
            except Exception as e:
                logger.error(f"Error generating system prompt: {e}")
                system_content = self.system_prompt()
            messages = [{"role": "system", "content": system_content}]

            # Gradio passes history either as a list of message dicts
            # (type="messages") or as (user, assistant) tuples; support both.
            if isinstance(history, list) and all(isinstance(h, dict) for h in history):
                messages.extend(history)
            else:
                for user_msg, assistant_msg in history:
                    messages.append({"role": "user", "content": user_msg})
                    messages.append({"role": "assistant", "content": assistant_msg})

            messages.append({"role": "user", "content": message})

            response = self.openai.chat.completions.create(
                model="gemini-2.0-flash",
                messages=messages,
            )
            return response.choices[0].message.content
        except Exception as e:
            logger.error(f"Error in chat method: {e}")
            return "I apologize, but I encountered an error while processing your request. Please try again later."


if __name__ == "__main__":
    me_rag = MeWithRAG()
    gr.ChatInterface(me_rag.chat, type="messages").launch()
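
    # For a quick non-UI check (hypothetical usage), you can call the chat
    # method directly instead of launching the interface:
    #
    #   print(me_rag.chat("What is your background in machine learning?", []))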
|
|
|
|
|
|
|
|
|
|