# LexGuardian / app.py
# Source: Hugging Face Space by sunbal7 (commit 9406250, ~1.52 kB)
import streamlit as st
import sympy as sp
import chromadb
from transformers import pipeline
from langchain.chains import RetrievalQA
from langchain.vectorstores import FAISS
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.llms import OpenAI
# Initialize ChromaDB for Retrieval-Augmented Generation (RAG)
# NOTE(review): chroma_client is created (side effect: opens/creates ./chroma_db)
# but is never referenced again in this file — confirm it is actually needed.
chroma_client = chromadb.PersistentClient(path="./chroma_db")
# Sentence-transformer embedding function used to query the FAISS index below.
embedding_model = SentenceTransformerEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
# Load the RAG-based Retrieval System
# NOTE(review): recent LangChain versions require
# allow_dangerous_deserialization=True here because load_local unpickles the
# index — verify against the installed langchain version.
vectorstore = FAISS.load_local("faiss_index", embedding_model)
retriever = vectorstore.as_retriever()
# RetrievalQA feeds retrieved documents into an OpenAI completion LLM;
# presumably OpenAI() reads OPENAI_API_KEY from the environment — verify.
qa_chain = RetrievalQA.from_chain_type(llm=OpenAI(), retriever=retriever)
# Load NLP Model
# Small local seq2seq model that turns the prompt + retrieved context into text.
model = pipeline("text2text-generation", model="google/flan-t5-small")
# --- Streamlit UI: collect a natural-language problem and emit a math model ---
st.title("🤖 AI-Driven Mathematical Model Generator")
st.write("Enter a problem statement in natural language to get a mathematical model.")

user_input = st.text_area("✍️ Enter your problem:")

if st.button("🚀 Generate Model"):
    # Retrieve supporting context from the vector store, then prompt the
    # local seq2seq model with both the question and that context.
    retrieved_context = qa_chain.run(user_input)  # RAG retrieval
    response = model(
        f"Generate a mathematical model for: {user_input}\nContext: {retrieved_context}",
        max_length=200,
    )
    generated_text = response[0]['generated_text']
    # SECURITY NOTE(review): sympify() uses eval-like parsing and here runs on
    # LLM output derived from user input. Consider
    # sp.parse_expr(..., evaluate=False) with a restricted transformation set.
    try:
        equation = sp.sympify(generated_text)
    except (sp.SympifyError, TypeError, ValueError):
        # Bug fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. If parsing fails, fall back to the raw text —
        # sp.latex() below still renders a plain string.
        equation = generated_text
    st.subheader("📌 Mathematical Model:")
    st.latex(sp.latex(equation))
    st.code(str(equation), language='python')