from flask import Flask, request, jsonify, render_template
from flask_cors import CORS
from dotenv import load_dotenv
import os
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import PromptTemplate
from langchain.chains import RetrievalQA

app = Flask(__name__)
CORS(app)

# Load environment variables
load_dotenv()
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
if not GOOGLE_API_KEY:
    raise ValueError("GOOGLE_API_KEY not found in environment variables.")

# Lazily initialized QA chain (built on the first request)
qa_chain = None


def get_qa_chain():
    """Build the RetrievalQA chain on first use and cache it globally."""
    global qa_chain
    if qa_chain is None:
        # Initialize LLM
        llm = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash-lite",
            google_api_key=GOOGLE_API_KEY,
            convert_system_message_to_human=True,
        )

        # Embeddings and vector store
        embedding_model = HuggingFaceEmbeddings(model_name="BAAI/bge-large-en-v1.5")
        vectordb = Chroma(
            persist_directory="chroma_store",
            embedding_function=embedding_model,
            collection_name="pdf_search_chroma",
        )
        retriever = vectordb.as_retriever(search_kwargs={"k": 6})

        # Prompt template that grounds the answer in the retrieved context
        prompt_template = PromptTemplate.from_template("""
You are an intelligent assistant for students asking about their university.
If the answer is not defined or not clearly understood, ask for clarification.
Answer clearly and helpfully based on the retrieved context. Do not make up information or suggestions.

Context:
{context}

Question:
{question}

Answer:
""")

        # Create chain
        qa_chain = RetrievalQA.from_chain_type(
            llm=llm,
            chain_type="stuff",
            retriever=retriever,
            chain_type_kwargs={"prompt": prompt_template},
        )
    return qa_chain

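
# The chain above assumes a Chroma collection is already persisted in ./chroma_store.
# A minimal sketch of how such a store could be built offline with the same embedding
# model; the loader, source folder, and chunking parameters are illustrative
# assumptions, not taken from this repo:
#
#   from langchain_community.document_loaders import PyPDFDirectoryLoader
#   from langchain_text_splitters import RecursiveCharacterTextSplitter
#
#   docs = PyPDFDirectoryLoader("data/").load()
#   chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
#   Chroma.from_documents(
#       chunks,
#       HuggingFaceEmbeddings(model_name="BAAI/bge-large-en-v1.5"),
#       persist_directory="chroma_store",
#       collection_name="pdf_search_chroma",
#   )
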
@app.route("/")
def home():
return render_template("index.html")
@app.route("/get", methods=["POST"])
def get_response():
data = request.get_json()
query = data.get("message", "")
if not query:
return jsonify({"response": {"response": "No message received."}}), 400
chain = get_qa_chain()
try:
response = chain.run(query)
return jsonify({"response": {"response": response}})
except Exception as e:
return jsonify({"response": {"response": f"Error: {str(e)}"}}), 500

if __name__ == "__main__":
    app.run(debug=True)
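
# For production, a sketch of serving this app with Waitress instead of the Flask
# development server (the host and port below are assumptions, not taken from this repo):
#
#   pip install waitress
#   waitress-serve --host=0.0.0.0 --port=7860 app:app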