import gradio as gr
from sentence_transformers import SentenceTransformer, util
import torch
import requests
from fastapi import FastAPI, Request
import uvicorn

# Supabase configuration (anon/public API key)
SUPABASE_URL = "https://olbjfxlclotxtnpjvpfj.supabase.co"
SUPABASE_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6Im9sYmpmeGxjbG90eHRucGp2cGZqIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTIyMzYwMDEsImV4cCI6MjA2NzgxMjAwMX0.7q_o5DCFEAAysnWXMChH4MI5qNhIVc4OgpT5JvgYxc0"

# 🧠 Load the sentence-embedding model
model = SentenceTransformer('all-MiniLM-L6-v2')

def get_faq_from_supabase(uid):
    """Fetch the FAQ rows belonging to `uid` from the Supabase REST API.

    The faq_texts table is expected to expose `uid`, `question`, and
    `answer` columns.
    """
    url = f"{SUPABASE_URL}/rest/v1/faq_texts?uid=eq.{uid}"
    headers = {
        "apikey": SUPABASE_KEY,
        "Authorization": f"Bearer {SUPABASE_KEY}",
        "Content-Type": "application/json"
    }
    try:
        r = requests.get(url, headers=headers, timeout=10)
        r.raise_for_status()
        data = r.json()
        return [{"q": d["question"], "a": d["answer"]} for d in data]
    except Exception as e:
        print("❌ Supabase error:", e)
        return []

def chatbot(uid, question):
    faqs = get_faq_from_supabase(uid)
    if not faqs:
        return "No FAQ data found for this user."

    questions = [f["q"] for f in faqs]
    answers = [f["a"] for f in faqs]

    # Embed the stored questions and the incoming query, then return the
    # answer whose question has the highest cosine similarity.
    embeddings = model.encode(questions, convert_to_tensor=True)
    query_embedding = model.encode(question, convert_to_tensor=True)

    scores = util.pytorch_cos_sim(query_embedding, embeddings)
    best_idx = torch.argmax(scores).item()
    return answers[best_idx]

# Gradio UI for manual testing
demo = gr.Interface(fn=chatbot, inputs=["text", "text"], outputs="text", title="Chatbot")

# Mount the Gradio UI on a FastAPI app so the same server can also accept
# plain POST requests. Mounting at a sub-path (here "/ui") keeps the
# /predict route below reachable.
app = FastAPI()
app = gr.mount_gradio_app(app, demo, path="/ui")

# Dedicated endpoint for Flutter/Postman clients.
# Expects a JSON body of the form {"data": ["<uid>", "<question>"]}.
@app.post("/predict")
async def predict(request: Request):
    try:
        payload = await request.json()
        uid, question = payload["data"]
        result = chatbot(uid, question)
        return {"data": [result]}
    except Exception as e:
        return {"error": str(e)}