Spaces:
Sleeping
Sleeping
Ajey95
committed on
Commit
Β·
6c689f5
1
Parent(s):
5614a15
Fix:app.py
Browse files
app.py
CHANGED
@@ -627,11 +627,11 @@ from dotenv import load_dotenv
|
|
627 |
from flask import Flask, render_template, request, jsonify, session
|
628 |
import google.generativeai as genai
|
629 |
|
630 |
-
# Import new langchain components and our
|
631 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
632 |
from langchain_community.vectorstores import FAISS
|
633 |
from utils.helpers import create_vector_store, get_greeting, load_quotes
|
634 |
-
from agents.router_agent import RouterAgent
|
635 |
|
636 |
# --- Initial Setup ---
|
637 |
load_dotenv()
|
@@ -651,19 +651,18 @@ try:
|
|
651 |
genai.configure(api_key=GEMINI_API_KEY)
|
652 |
model = genai.GenerativeModel('gemini-1.5-flash')
|
653 |
|
654 |
-
# Load the persistent knowledge base from disk
|
655 |
if os.path.exists('faiss_index'):
|
656 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
|
657 |
vector_store = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
|
658 |
print("β
Gemini Model and Knowledge Base loaded successfully!")
|
659 |
else:
|
660 |
-
print("β
Gemini Model loaded. No knowledge base found.")
|
661 |
else:
|
662 |
print("β οΈ No Gemini API key found.")
|
663 |
except Exception as e:
|
664 |
print(f"β Error during initialization: {e}")
|
665 |
|
666 |
-
# --- Main AI Application Class ---
|
667 |
class MyPharmaAI:
|
668 |
def __init__(self, gemini_model, vector_store_db):
|
669 |
self.router = RouterAgent(gemini_model)
|
@@ -671,14 +670,14 @@ class MyPharmaAI:
|
|
671 |
self.vector_store = vector_store_db
|
672 |
|
673 |
def process_query(self, query, viva_state, chat_history):
|
|
|
|
|
674 |
file_context = ""
|
675 |
if self.vector_store:
|
676 |
-
|
677 |
-
relevant_docs = self.vector_store.similarity_search(query)
|
678 |
-
# Join the content of the relevant docs to create the context
|
679 |
file_context = "\n".join(doc.page_content for doc in relevant_docs)
|
680 |
|
681 |
-
# Pass the retrieved context to the router
|
682 |
return self.router.route_query(query, file_context, viva_state, chat_history)
|
683 |
|
684 |
pharma_ai = MyPharmaAI(model, vector_store)
|
@@ -686,32 +685,38 @@ pharma_ai = MyPharmaAI(model, vector_store)
|
|
686 |
# --- Flask Routes ---
|
687 |
@app.route('/')
|
688 |
def index():
|
|
|
689 |
return render_template('index.html', greeting=get_greeting(), daily_quote=random.choice(pharma_ai.quotes))
|
690 |
|
691 |
@app.route('/chat', methods=['POST'])
|
692 |
def chat():
|
693 |
-
data = request.get_json()
|
694 |
-
query = data.get('query', '').strip()
|
695 |
-
if not query:
|
696 |
-
return jsonify({'success': False, 'error': 'Empty query'}), 400
|
697 |
-
|
698 |
-
chat_history = session.get('chat_history', [])
|
699 |
-
viva_state = session.get('viva_state', None)
|
700 |
-
|
701 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
702 |
result = pharma_ai.process_query(query, viva_state, chat_history)
|
|
|
703 |
if result.get('success'):
|
704 |
chat_history.append({'role': 'user', 'parts': [query]})
|
705 |
chat_history.append({'role': 'model', 'parts': [result.get('message', '')]})
|
706 |
session['chat_history'] = chat_history[-10:]
|
|
|
|
|
|
|
|
|
707 |
return jsonify(result)
|
|
|
708 |
except Exception as e:
|
709 |
print(f"Error in /chat endpoint: {e}")
|
710 |
return jsonify({'success': False, 'message': f'Server error: {e}', 'agent_used': 'error'}), 500
|
711 |
|
712 |
# --- Main Execution ---
|
713 |
if __name__ == '__main__':
|
714 |
-
# Ensure data folder exists for quotes.json
|
715 |
-
os.makedirs('data', exist_ok=True)
|
716 |
port = int(os.environ.get('PORT', 7860))
|
717 |
app.run(host='0.0.0.0', port=port)
|
|
|
627 |
from flask import Flask, render_template, request, jsonify, session
|
628 |
import google.generativeai as genai
|
629 |
|
630 |
+
# Import new langchain components and our helpers
|
631 |
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
632 |
from langchain_community.vectorstores import FAISS
|
633 |
from utils.helpers import create_vector_store, get_greeting, load_quotes
|
634 |
+
from agents.router_agent import RouterAgent # Re-import the RouterAgent
|
635 |
|
636 |
# --- Initial Setup ---
|
637 |
load_dotenv()
|
|
|
651 |
genai.configure(api_key=GEMINI_API_KEY)
|
652 |
model = genai.GenerativeModel('gemini-1.5-flash')
|
653 |
|
|
|
654 |
if os.path.exists('faiss_index'):
|
655 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
|
656 |
vector_store = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
|
657 |
print("β
Gemini Model and Knowledge Base loaded successfully!")
|
658 |
else:
|
659 |
+
print("β
Gemini Model loaded. No knowledge base found to load.")
|
660 |
else:
|
661 |
print("β οΈ No Gemini API key found.")
|
662 |
except Exception as e:
|
663 |
print(f"β Error during initialization: {e}")
|
664 |
|
665 |
+
# --- Main AI Application Class (Reinstated) ---
|
666 |
class MyPharmaAI:
|
667 |
def __init__(self, gemini_model, vector_store_db):
|
668 |
self.router = RouterAgent(gemini_model)
|
|
|
670 |
self.vector_store = vector_store_db
|
671 |
|
672 |
def process_query(self, query, viva_state, chat_history):
    """Answer a query by combining knowledge-base retrieval with agent routing.

    When a vector store is loaded, the top matching documents are pulled
    from the persistent knowledge base and joined into a context string;
    the query plus that context is then handed to the router agent.
    """
    context_chunks = []
    if self.vector_store:
        # Top 3 most similar documents from the persistent knowledge base.
        for doc in self.vector_store.similarity_search(query, k=3):
            context_chunks.append(doc.page_content)
    file_context = "\n".join(context_chunks)

    # Hand the retrieved context to the multi-agent router system.
    return self.router.route_query(query, file_context, viva_state, chat_history)
|
682 |
|
683 |
pharma_ai = MyPharmaAI(model, vector_store)
|
|
|
685 |
# --- Flask Routes ---
|
686 |
@app.route('/')
def index():
    """Render the home page with a time-of-day greeting and a random quote."""
    greeting = get_greeting()
    quote = random.choice(pharma_ai.quotes)
    return render_template('index.html', greeting=greeting, daily_quote=quote)
|
690 |
|
691 |
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn.

    Reads the user's query from the JSON body, runs it through the
    orchestrator with the session's chat history and viva state, persists
    the updated state back into the session, and returns the result as JSON.

    Returns 400 for a missing/empty query or malformed JSON body,
    500 on unexpected errors.
    """
    # silent=True returns None instead of raising on a missing or invalid
    # JSON body, so a malformed request produces a clean 400 rather than
    # being swallowed by the generic except below and reported as a 500.
    data = request.get_json(silent=True) or {}
    query = data.get('query', '').strip()
    if not query:
        return jsonify({'success': False, 'error': 'Empty query'}), 400

    chat_history = session.get('chat_history', [])
    viva_state = session.get('viva_state', None)

    try:
        # Call the main orchestrator.
        result = pharma_ai.process_query(query, viva_state, chat_history)

        if result.get('success'):
            chat_history.append({'role': 'user', 'parts': [query]})
            chat_history.append({'role': 'model', 'parts': [result.get('message', '')]})
            # Keep only the most recent exchanges to bound session size.
            session['chat_history'] = chat_history[-10:]

        # Persist the viva state whenever the orchestrator reports one.
        if 'viva_state' in result:
            session['viva_state'] = result.get('viva_state')

        return jsonify(result)
    except Exception as e:
        print(f"Error in /chat endpoint: {e}")
        return jsonify({'success': False, 'message': f'Server error: {e}', 'agent_used': 'error'}), 500
|
718 |
|
719 |
# --- Main Execution ---
|
720 |
if __name__ == '__main__':
    # Bind to all interfaces on port 7860 (the Hugging Face Spaces
    # convention) unless the environment supplies an override.
    port = int(os.getenv('PORT', '7860'))
    app.run(host='0.0.0.0', port=port)
|