import os, threading, uvicorn, time, traceback, random, json, asyncio, uuid
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse, JSONResponse
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import intent_test_runner
from service_config import ServiceConfig
import intent, llm_model
from log import log
from chat_handler_debug import handle_chat
from llm_model import get_model, get_tokenizer
s_config = ServiceConfig()
s_config.setup_environment()
# === FastAPI
app = FastAPI()
chat_history = []
@app.get("/")
def health():
    return {"status": "ok"}
@app.post("/run_tests", status_code=202)
def run_tests():
log("🚦 /run_tests çağrıldı. Testler başlatılıyor...")
threading.Thread(target=intent_test_runner.run_all_tests, daemon=True).start()
return {"status": "running", "message": "Test süreci başlatıldı."}
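# Example client call (a sketch; assumes the service is reachable on localhost:7860
# and that the `requests` package is available on the client side):
#
#   import requests
#   resp = requests.post("http://localhost:7860/run_tests")
#   print(resp.status_code)  # 202 -- the tests keep running in the background thread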
@app.get("/start", response_class=HTMLResponse)
def root():
# Yeni session ID üret
session_id = str(uuid.uuid4())
session_info = {
"session_id": session_id,
"variables": {},
"auth_tokens": {},
"last_intent": None,
"awaiting_variable": None
}
# Session store başlatıldıysa ekle
if not hasattr(app.state, "session_store"):
app.state.session_store = {}
app.state.session_store[session_id] = session_info
log(f"🌐 /start ile yeni session başlatıldı: {session_id}")
# HTML + session_id gömülü
return f"""
Turkcell LLM Chat
"""
@app.post("/start_chat")
def start_chat():
if get_model() is None or get_tokenizer() is None:
return {"error": "Model yüklenmedi."}
if not hasattr(app.state, "session_store"):
app.state.session_store = {}
session_id = str(uuid.uuid4())
session_info = {
"session_id": session_id,
"variables": {},
"auth_tokens": {},
"last_intent": None,
"awaiting_variable": None
}
app.state.session_store[session_id] = session_info
log(f"🆕 Yeni session başlatıldı: {session_id}")
return {"session_id": session_id}
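# Example client flow (a sketch; localhost:7860 is assumed, and /chat is expected to
# receive the returned session_id with each message -- see handle_chat for the details):
#
#   import requests
#   session_id = requests.post("http://localhost:7860/start_chat").json()["session_id"]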
@app.post("/train_intents", status_code=202)
def train_intents(train_input: intent.TrainInput):
log("📥 POST /train_intents çağrıldı.")
intents = train_input.intents
s_config.INTENT_DEFINITIONS = {intent["name"]: intent for intent in intents}
threading.Thread(target=lambda: intent.background_training(intents, s_config), daemon=True).start()
return {"status": "accepted", "message": "Intent eğitimi arka planda başlatıldı."}
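# Example request body shape (a sketch; only the top-level "intents" list and the
# "name" key are visible in this module, any other fields come from intent.TrainInput):
#
#   {
#     "intents": [
#       {"name": "<intent-name>", ...}
#     ]
#   }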
@app.post("/load_intent_model")
def load_intent_model():
try:
intent.INTENT_TOKENIZER = AutoTokenizer.from_pretrained(s_config.INTENT_MODEL_PATH)
intent.INTENT_MODEL = AutoModelForSequenceClassification.from_pretrained(s_config.INTENT_MODEL_PATH)
with open(os.path.join(s_config.INTENT_MODEL_PATH, "label2id.json")) as f:
intent.LABEL2ID = json.load(f)
return {"status": "ok", "message": "Intent modeli yüklendi."}
except Exception as e:
return JSONResponse(content={"error": str(e)}, status_code=500)
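# s_config.INTENT_MODEL_PATH is expected to hold a Hugging Face sequence-classification
# checkpoint (model and tokenizer files) alongside a label2id.json label mapping,
# presumably written out by the /train_intents background training.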
@app.post("/chat")
async def chat(msg: llm_model.Message, request: Request):
return await handle_chat(msg, request, app, s_config)
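# Startup: the LLM is loaded in a background thread and uvicorn runs from a daemon
# thread, while the main thread sleeps below to keep both alive. Until setup_model
# finishes, get_model() / get_tokenizer() return None and /start_chat reports
# "Model not loaded."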
threading.Thread(target=llm_model.setup_model, kwargs={"s_config": s_config}, daemon=True).start()
threading.Thread(target=lambda: uvicorn.run(app, host="0.0.0.0", port=7860), daemon=True).start()
while True:
    time.sleep(60)