Spaces:
Sleeping
Sleeping
File size: 2,368 Bytes
aad84fe fafa7b0 aad84fe a702cfd fafa7b0 aad84fe fafa7b0 2436221 a702cfd 2436221 981d40d 2436221 a702cfd 981d40d a702cfd 981d40d 2436221 aad84fe 981d40d fafa7b0 a702cfd 981d40d a702cfd 981d40d aad84fe 981d40d aad84fe fafa7b0 981d40d aad84fe 981d40d fafa7b0 e827c31 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
import gradio as gr
from fastapi import FastAPI, Request
import uvicorn
from sentence_transformers import SentenceTransformer, util
import torch
import requests
import threading
# Supabase setup: REST endpoint and anon API key for the FAQ backend.
# NOTE(review): the key is hard-coded in source — move it to an environment
# variable / Space secret; anon JWTs are public-ish but should not live in git.
SUPABASE_URL = "https://olbjfxlclotxtnpjvpfj.supabase.co"
SUPABASE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0"
# Sentence-embedding model loaded once at import time (shared by all requests).
model = SentenceTransformer('all-MiniLM-L6-v2')
def get_faq_from_supabase(uid):
    """Fetch the FAQ entries stored for *uid* from the Supabase REST API.

    Queries the ``faq_texts`` table filtered by ``uid`` and maps each row
    to ``{"q": question, "a": answer}``.

    Returns:
        list[dict]: FAQ entries, or an empty list on any request/parse
        failure (best-effort: this function never raises).
    """
    url = f"{SUPABASE_URL}/rest/v1/faq_texts?uid=eq.{uid}"
    headers = {
        "apikey": SUPABASE_KEY,
        "Authorization": f"Bearer {SUPABASE_KEY}",
        "Content-Type": "application/json"
    }
    try:
        # Timeout added: requests.get without one can block a worker forever.
        r = requests.get(url, headers=headers, timeout=10)
        r.raise_for_status()
        data = r.json()
        return [{"q": d["question"], "a": d["answer"]} for d in data]
    except Exception as e:
        # Broad catch is deliberate: HTTP, JSON, and schema (KeyError)
        # failures all degrade to "no FAQ" rather than crashing the request.
        print("Supabase error:", e)
        return []
def chatbot(uid, question):
    """Answer *question* from the FAQ entries stored for *uid*.

    Embeds every stored FAQ question plus the incoming question with the
    shared sentence-transformer model, then returns the answer belonging
    to the most cosine-similar stored question. Returns a fallback
    message when the user has no FAQ data.
    """
    entries = get_faq_from_supabase(uid)
    if not entries:
        return "Tidak ada data FAQ."
    corpus = [entry["q"] for entry in entries]
    corpus_embeddings = model.encode(corpus, convert_to_tensor=True)
    query_embedding = model.encode(question, convert_to_tensor=True)
    similarity = util.pytorch_cos_sim(query_embedding, corpus_embeddings)
    best = int(torch.argmax(similarity))
    return entries[best]["a"]
# FastAPI application serving the /predict endpoint (runs under uvicorn).
app = FastAPI()
@app.post("/predict")
async def predict(request: Request):
    """JSON endpoint: expects ``{"data": [uid, question]}``.

    Returns ``{"data": [answer]}`` on success, or ``{"error": ...}`` when
    the payload is missing or malformed.
    """
    payload = await request.json()
    data = payload.get("data")
    # Validate shape before unpacking: the original tuple-unpack raised an
    # unhandled ValueError (HTTP 500) whenever "data" was not a 2-element list.
    if not isinstance(data, (list, tuple)) or len(data) != 2:
        return {"error": "UID dan pertanyaan diperlukan."}
    uid, question = data
    if not uid or not question:
        return {"error": "UID dan pertanyaan diperlukan."}
    answer = chatbot(uid, question)
    return {"data": [answer]}
# Gradio UI (optional) — a manual-testing front end for the same chatbot.
def launch_gradio():
    """Launch a public Gradio interface wrapping ``chatbot(uid, question)``."""
    demo = gr.Interface(
        fn=chatbot,
        inputs=["text", "text"],
        outputs="text",
        title="Biruu Chatbot",
        examples=[["uid123", "Apakah bisa bayar di tempat?"]],
        allow_flagging="never"
    )
    # share=True opens a public tunnel link in addition to the local server.
    demo.launch(share=True)
# Run Gradio in a separate thread so it does not block the FastAPI server.
# NOTE(review): this starts at import time (side effect) and the thread is
# non-daemon, so it can keep the process alive after uvicorn exits — confirm
# this is intended on the hosting platform.
threading.Thread(target=launch_gradio).start()
# Entry point: serve the FastAPI app on port 7860 (the Hugging Face Spaces port).
# Fixed: the original `if _name_ == "_main_"` referenced undefined names
# (single underscores) and raised NameError at import; the dunder form is correct.
if __name__ == "__main__":
    uvicorn.run("app:app", host="0.0.0.0", port=7860)