Ogghey committed on
Commit 20d5756 · verified · 1 Parent(s): 69744c3

Update app.py

Files changed (1)
  1. app.py +16 -41
app.py CHANGED
@@ -1,16 +1,14 @@
-import gradio as gr
 from fastapi import FastAPI, Request
-import uvicorn
 from sentence_transformers import SentenceTransformer, util
 import torch
 import requests
-import threading
 
-# 🔐 Supabase setup
 SUPABASE_URL = "https://olbjfxlclotxtnpjvpfj.supabase.co"
-SUPABASE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0"
+SUPABASE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0"  # fill in your own key
 
-model = SentenceTransformer('all-MiniLM-L6-v2')
+model = SentenceTransformer("all-MiniLM-L6-v2")
+
+app = FastAPI()
 
 def get_faq_from_supabase(uid):
     url = f"{SUPABASE_URL}/rest/v1/faq_texts?uid=eq.{uid}"
@@ -28,10 +26,17 @@ def get_faq_from_supabase(uid):
         print("❌ Supabase error:", e)
         return []
 
-def chatbot(uid, question):
+@app.post("/predict")
+async def predict(request: Request):
+    body = await request.json()
+    uid, question = body.get("data", [None, None])
+
+    if not uid or not question:
+        return {"data": ["UID atau pertanyaan tidak valid."]}
+
     faqs = get_faq_from_supabase(uid)
     if not faqs:
-        return "Tidak ada data FAQ."
+        return {"data": ["FAQ tidak ditemukan untuk UID ini."]}
 
     questions = [f["q"] for f in faqs]
     answers = [f["a"] for f in faqs]
@@ -39,37 +44,7 @@ def chatbot(uid, question):
     embeddings = model.encode(questions, convert_to_tensor=True)
     query_embedding = model.encode(question, convert_to_tensor=True)
 
-    scores = util.pytorch_cos_sim(query_embedding, embeddings)
-    best_idx = torch.argmax(scores).item()
-    return answers[best_idx]
-
-# 🌐 FastAPI app
-app = FastAPI()
-
-@app.post("/predict")
-async def predict(request: Request):
-    payload = await request.json()
-    uid, question = payload.get("data", [None, None])
-    if not uid or not question:
-        return {"error": "UID dan pertanyaan diperlukan."}
-
-    answer = chatbot(uid, question)
-    return {"data": [answer]}
-
-# 🌈 Gradio UI (optional)
-def launch_gradio():
-    demo = gr.Interface(
-        fn=chatbot,
-        inputs=["text", "text"],
-        outputs="text",
-        title="Biruu Chatbot",
-        examples=[["uid123", "Apakah bisa bayar di tempat?"]],
-        allow_flagging="never"
-    )
-    demo.launch(share=True)
-
-# ▶️ Run Gradio in a separate thread so it does not clash with FastAPI
-threading.Thread(target=launch_gradio).start()
+    similarity = util.pytorch_cos_sim(query_embedding, embeddings)
+    best_idx = torch.argmax(similarity).item()
 
-if __name__ == "__main__":
-    uvicorn.run("app:app", host="0.0.0.0", port=7860)
+    return {"data": [answers[best_idx]]}
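
For quick manual testing, a minimal client-side sketch of calling the updated /predict route is shown below. The {"data": [uid, question]} request shape and {"data": [answer]} response shape come from the diff above; the BASE_URL value and the ask() helper name are illustrative placeholders, not part of the commit.

import requests

# Placeholder base URL: substitute the actual address where this Space is served.
BASE_URL = "http://localhost:7860"

def ask(uid: str, question: str) -> str:
    """POST to /predict with the {"data": [uid, question]} payload the endpoint expects."""
    resp = requests.post(f"{BASE_URL}/predict", json={"data": [uid, question]})
    resp.raise_for_status()
    # The endpoint responds with {"data": [<answer string>]} in every branch.
    return resp.json()["data"][0]

if __name__ == "__main__":
    # "uid123" and the question mirror the example pair from the removed Gradio demo.
    print(ask("uid123", "Apakah bisa bayar di tempat?"))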