Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -1,17 +1,15 @@
|
|
1 |
-
|
2 |
import gradio as gr
|
|
|
|
|
3 |
from sentence_transformers import SentenceTransformer, util
|
4 |
import torch
|
5 |
import requests
|
6 |
-
|
7 |
-
from gradio.routes import App
|
8 |
-
import uvicorn
|
9 |
|
10 |
-
# π
|
11 |
SUPABASE_URL = "https://olbjfxlclotxtnpjvpfj.supabase.co"
|
12 |
SUPABASE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0"
|
13 |
|
14 |
-
# π§ Load model
|
15 |
model = SentenceTransformer('all-MiniLM-L6-v2')
|
16 |
|
17 |
def get_faq_from_supabase(uid):
|
@@ -33,7 +31,7 @@ def get_faq_from_supabase(uid):
|
|
33 |
def chatbot(uid, question):
|
34 |
faqs = get_faq_from_supabase(uid)
|
35 |
if not faqs:
|
36 |
-
return "Tidak ada data FAQ
|
37 |
|
38 |
questions = [f["q"] for f in faqs]
|
39 |
answers = [f["a"] for f in faqs]
|
@@ -45,20 +43,30 @@ def chatbot(uid, question):
|
|
45 |
best_idx = torch.argmax(scores).item()
|
46 |
return answers[best_idx]
|
47 |
|
48 |
-
#
|
49 |
-
demo = gr.Interface(fn=chatbot, inputs=["text", "text"], outputs="text", title="Chatbot")
|
50 |
-
|
51 |
-
# π Tambahkan FastAPI app agar bisa menerima POST request
|
52 |
app = FastAPI()
|
53 |
-
app = App(app, demo)
|
54 |
|
55 |
-
# β
Endpoint khusus untuk Flutter/Postman
|
56 |
@app.post("/predict")
|
57 |
async def predict(request: Request):
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
2 |
+
from fastapi import FastAPI, Request
|
3 |
+
import uvicorn
|
4 |
from sentence_transformers import SentenceTransformer, util
|
5 |
import torch
|
6 |
import requests
|
7 |
+
import threading
|
|
|
|
|
8 |
|
9 |
+
# 🌐 Supabase setup.
# NOTE(review): the anon key was hard-coded in the repository. Prefer
# injecting both values via environment variables (e.g. Space "secrets");
# the original literals remain as fallbacks so existing deployments keep
# working unchanged.
import os

SUPABASE_URL = os.environ.get(
    "SUPABASE_URL",
    "https://olbjfxlclotxtnpjvpfj.supabase.co",
)
SUPABASE_KEY = os.environ.get(
    "SUPABASE_KEY",
    "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0",
)

# Sentence-embedding model used for FAQ similarity matching.
model = SentenceTransformer('all-MiniLM-L6-v2')
14 |
|
15 |
def get_faq_from_supabase(uid):
|
|
|
31 |
def chatbot(uid, question):
|
32 |
faqs = get_faq_from_supabase(uid)
|
33 |
if not faqs:
|
34 |
+
return "Tidak ada data FAQ."
|
35 |
|
36 |
questions = [f["q"] for f in faqs]
|
37 |
answers = [f["a"] for f in faqs]
|
|
|
43 |
best_idx = torch.argmax(scores).item()
|
44 |
return answers[best_idx]
|
45 |
|
46 |
+
# 🚀 FastAPI app so the bot can serve POST requests (Flutter/Postman).
app = FastAPI()

@app.post("/predict")
async def predict(request: Request):
    """Answer a FAQ question sent as JSON: ``{"data": [uid, question]}``.

    Returns ``{"data": [answer]}`` on success, or ``{"error": ...}`` when
    the payload is missing or malformed.
    """
    payload = await request.json()
    # Guard the shape of "data": the original bare unpacking raised an
    # unhandled ValueError whenever the client sent anything other than a
    # 2-element list, turning a bad request into a 500.
    data = payload.get("data", [None, None])
    if not isinstance(data, (list, tuple)) or len(data) != 2:
        return {"error": "UID dan pertanyaan diperlukan."}
    uid, question = data
    if not uid or not question:
        return {"error": "UID dan pertanyaan diperlukan."}

    answer = chatbot(uid, question)
    return {"data": [answer]}
58 |
+
|
59 |
+
# 🌐 Gradio UI (optional) — convenient for manual testing in the browser.
def launch_gradio():
    """Build and launch the Gradio front-end for the chatbot."""
    ui_config = {
        "fn": chatbot,
        "inputs": ["text", "text"],
        "outputs": "text",
        "title": "Biruu Chatbot",
        "examples": [["uid123", "Apakah bisa bayar di tempat?"]],
        "allow_flagging": "never",
    }
    gr.Interface(**ui_config).launch(share=True)
70 |
+
|
71 |
+
# ▶️ Run Gradio on a separate thread so it does not clash with FastAPI.
ui_thread = threading.Thread(target=launch_gradio)
ui_thread.start()