import os, threading, uvicorn, time, traceback, random, json, asyncio, uuid
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse, JSONResponse
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import intent_test_runner
from service_config import ServiceConfig
import intent, llm_model
from log import log
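
# Overview: this module wires up a small FastAPI chat service. It keeps
# per-session state in app.state.session_store, trains and loads an intent
# classifier in background threads, and answers /chat either through a matched
# intent handler (intent.execute_intent) or through llm_model.generate_response.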

s_config = ServiceConfig()
s_config.setup_environment()

# === FastAPI app ===
app = FastAPI()
chat_history = []

@app.get("/")
def health():
    return {"status": "ok"}

@app.post("/run_tests", status_code=202)
def run_tests():
    log("🚦 /run_tests çağrıldı. Testler başlatılıyor...")
    threading.Thread(target=intent_test_runner.run_all_tests, daemon=True).start()
    return {"status": "running", "message": "Test süreci başlatıldı."}

@app.get("/start", response_class=HTMLResponse)
def root():
    # Generate a new session ID
    session_id = str(uuid.uuid4())
    session_info = {
        "session_id": session_id,
        "variables": {},
        "auth_tokens": {},
        "last_intent": None,
        "awaiting_variable": None
    }

    # Initialize the session store on first use, then register this session
    if not hasattr(app.state, "session_store"):
        app.state.session_store = {}
    app.state.session_store[session_id] = session_info

    log(f"🌐 /start ile yeni session başlatıldı: {session_id}")

    # Minimal HTML page with the session_id embedded
    return f"""
    <html><body>
        <h2>Turkcell LLM Chat</h2>
        <textarea id='input' rows='4' cols='60'></textarea><br>
        <button onclick='send()'>Gönder</button><br><br>
        <label>Model Cevabı:</label><br>
        <textarea id='output' rows='10' cols='80' readonly style='white-space: pre-wrap;'></textarea>
        <script>
        const sessionId = "{session_id}";
        localStorage.setItem("session_id", sessionId);

        async function send() {{
            const input = document.getElementById("input").value;
            const res = await fetch('/chat', {{
                method: 'POST',
                headers: {{
                    'Content-Type': 'application/json',
                    'X-Session-ID': sessionId
                }},
                body: JSON.stringify({{ user_input: input }})
            }});
            const data = await res.json();
            document.getElementById('output').value = data.reply || data.response || data.error || 'Hata oluştu.';
        }}
        </script>
    </body></html>
    """

@app.post("/start_chat")
def start_chat():
    if llm_model.model is None or llm_model.tokenizer is None:
        return {"error": "Model yüklenmedi."}
        
    if not hasattr(app.state, "session_store"):
        app.state.session_store = {}

    session_id = str(uuid.uuid4())
    session_info = {
        "session_id": session_id,
        "variables": {},
        "auth_tokens": {},
        "last_intent": None,
        "awaiting_variable": None
    }
    app.state.session_store[session_id] = session_info
    log(f"🆕 Yeni session başlatıldı: {session_id}")
    return {"session_id": session_id}

@app.post("/train_intents", status_code=202)
def train_intents(train_input: intent.TrainInput):
    log("📥 POST /train_intents çağrıldı.")
    intents = train_input.intents
    s_config.INTENT_DEFINITIONS = {item["name"]: item for item in intents}
    threading.Thread(target=lambda: intent.background_training(intents, s_config), daemon=True).start()
    return {"status": "accepted", "message": "Intent eğitimi arka planda başlatıldı."}

@app.post("/load_intent_model")
def load_intent_model():
    try:
        intent.INTENT_TOKENIZER = AutoTokenizer.from_pretrained(s_config.INTENT_MODEL_PATH)
        intent.INTENT_MODEL = AutoModelForSequenceClassification.from_pretrained(s_config.INTENT_MODEL_PATH)
        with open(os.path.join(s_config.INTENT_MODEL_PATH, "label2id.json")) as f:
            intent.LABEL2ID = json.load(f)
        return {"status": "ok", "message": "Intent modeli yüklendi."}
    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)

@app.post("/chat")
async def chat(msg: llm_model.Message, request: Request):
    user_input = msg.user_input.strip()
    session_id = request.headers.get("X-Session-ID", "demo-session")

    if not hasattr(app.state, "session_store"):
        app.state.session_store = {}
    session_store = app.state.session_store

    # Fall back to a fresh session if this session_id has not been seen before
    session_info = {
        "session_id": session_id,
        "variables": {},
        "auth_tokens": {},
        "last_intent": None,
        "awaiting_variable": None
    }
    session = session_store.get(session_id, session_info)
    try:
        if llm_model.model is None or llm_model.tokenizer is None:
            return {"error": "Model yüklenmedi."}

        if s_config.INTENT_MODEL:
            # Detect the intent and generate an LLM reply concurrently
            intent_task = asyncio.create_task(intent.detect_intent(user_input))
            response_task = asyncio.create_task(llm_model.generate_response(user_input, s_config))
            intent_name, intent_conf = await intent_task
            log(f"🎯 Intent: {intent_name} (conf={intent_conf:.2f})")
            if intent_conf > s_config.INTENT_CONFIDENCE_THRESHOLD and intent_name in s_config.INTENT_DEFINITIONS:
                result = intent.execute_intent(intent_name, user_input, session)
                if "reply" in result:
                    session_store[session_id] = result["session"]
                    app.state.session_store = session_store
                    return {"reply": result["reply"]}
                elif "errors" in result:
                    session_store[session_id] = result["session"]
                    app.state.session_store = session_store
                    return {"response": list(result["errors"].values())[0]}
                else:
                    return {"response": random.choice(s_config.FALLBACK_ANSWERS)}
            else:
                response, response_conf = await response_task
                if response_conf is not None and response_conf < s_config.LLM_CONFIDENCE_THRESHOLD:
                    return {"response": random.choice(s_config.FALLBACK_ANSWERS)}
                return {"response": response}
        else:
            response, response_conf = await llm_model.generate_response(user_input, s_config)
            if response_conf is not None and response_conf < s_config.LLM_CONFIDENCE_THRESHOLD:
                return {"response": random.choice(s_config.FALLBACK_ANSWERS)}
            return {"response": response}
    except Exception as e:
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)

# Load the LLM in the background, serve the API on port 7860, and keep the
# main (non-daemon) thread alive so the daemon threads keep running.
threading.Thread(target=llm_model.setup_model, kwargs={"s_config": s_config}, daemon=True).start()
threading.Thread(target=lambda: uvicorn.run(app, host="0.0.0.0", port=7860), daemon=True).start()
while True:
    time.sleep(60)
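
# Illustrative client flow (assumes the service is reachable at localhost:7860):
#
#   curl -s -X POST http://localhost:7860/start_chat
#     -> {"session_id": "..."}
#
#   curl -s -X POST http://localhost:7860/chat \
#        -H "Content-Type: application/json" \
#        -H "X-Session-ID: <session_id>" \
#        -d '{"user_input": "Merhaba"}'
#     -> {"reply": "..."} when an intent matches, otherwise {"response": "..."}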