# ccr-colorado / app.py
import json
import gradio as gr
from sentence_transformers import SentenceTransformer
import faiss
import numpy as np
from huggingface_hub import InferenceClient
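# Pipeline overview: embed the user's question, retrieve the closest policy entry
# from a FAISS index built over colorado_foodstamps.json, then ask zephyr-7b-beta
# to summarize it (or to answer directly when nothing relevant is retrieved).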
# 🔹 Load JSON Data with Colorado Food Stamp Information
DATA_FILE = "colorado_foodstamps.json"
def load_json_data():
    try:
        with open(DATA_FILE, "r", encoding="utf-8") as f:
            data = json.load(f)
        # Ensure data is a dictionary, not a list
        if isinstance(data, list):
            data = {str(i): str(entry) for i, entry in enumerate(data)}
        # Convert all values to strings
        data = {key: str(value) for key, value in data.items()}
        return data
    except (FileNotFoundError, ValueError) as e:
        return {"error": f"Data loading issue: {e}"}
data = load_json_data()
# 🔹 Initialize FAISS for Searching Relevant Answers
model = SentenceTransformer("all-MiniLM-L6-v2") # Faster with good accuracy
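# all-MiniLM-L6-v2 produces 384-dimensional embeddings; the index dimension below
# is taken from the encoded batch rather than hard-coded.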
def create_faiss_index(data):
    texts = list(data.values())
    # Encode all entries in one batch; FAISS expects float32 vectors
    embeddings = np.asarray(model.encode(texts), dtype="float32")
    index = faiss.IndexFlatL2(embeddings.shape[1])
    index.add(embeddings)
    return index, texts
index, texts = create_faiss_index(data)
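# The index is built once at startup; if the JSON file failed to load, it holds
# only the single error-message entry returned by load_json_data().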
# 🔹 Function to Search FAISS for Relevant Answers
def search_faiss(query, top_k=1):
    query_embedding = model.encode(query).reshape(1, -1)
    distances, indices = index.search(query_embedding, top_k)
    best = indices[0][0]
    # FAISS returns -1 when it cannot fill a slot, so check both bounds
    if 0 <= best < len(texts):
        return texts[best]
    return "No relevant information found."
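# Example (hypothetical query): search_faiss("income limit for a household of three")
# would return the stored policy entry whose embedding is closest by L2 distance.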
# 🔹 Hugging Face API for Additional Responses
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
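# Note: this uses the hosted Inference API; depending on the model's availability
# and rate limits, an HF_TOKEN may need to be configured in the Space settings.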
def get_huggingface_response(query):
    messages = [
        {"role": "system", "content": "Provide accurate food stamp information for Colorado."},
        {"role": "user", "content": query},
    ]
    response = ""
    for message in client.chat_completion(messages, max_tokens=512, stream=True, temperature=0.7, top_p=0.95):
        # Streamed chunks can arrive without text, so treat missing content as empty
        response += message.choices[0].delta.content or ""
    return response
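# Design note: the stream is accumulated and returned as one string; the chunks
# could instead be yielded so gr.ChatInterface renders the reply incrementally.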
# 🔹 Main Chatbot Function
def chatbot_response(message, history):
    relevant_info = search_faiss(message)
    if "No relevant information found." not in relevant_info:
        # Summarize and clarify the retrieved policy text using the Hugging Face LLM
        user_query_with_context = f"""
The user is asking: {message}

The most relevant policy information retrieved is:
{relevant_info}

Summarize this information in a clear, concise, and user-friendly way for someone asking about Colorado food stamps, and provide the most recent information you can. If the retrieved policy text does not appear to apply, answer the question as accurately as you can, state clearly that the reply is not drawn from the SNAP policy CCR, and encourage the user to try other keywords that may surface better policy information.
"""
        return get_huggingface_response(user_query_with_context)
    return get_huggingface_response(message)
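# The `history` argument is required by the gr.ChatInterface signature but is not
# used here; each question is answered independently of earlier turns.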
# 🔹 Gradio Chat Interface
demo = gr.ChatInterface(chatbot_response, textbox=gr.Textbox(placeholder="Ask about Colorado food stamps..."))
demo.launch()
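# Spaces runs this file directly; `python app.py` should also work locally, assuming
# gradio, faiss-cpu (or faiss-gpu), sentence-transformers, and huggingface_hub are installed.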