flare / chat_handler.py
ciyidogan's picture
Upload 15 files
16134a9 verified
raw
history blame
4.87 kB
from fastapi import Request
from fastapi.responses import JSONResponse
import traceback
import random
from intent import extract_parameters, validate_variable_formats, detect_intent
from intent_api import execute_intent
from log import log
from llm_model import Message, LLMModel
async def handle_chat(msg: Message, request: Request, app, service_config, session, llm_model: LLMModel):
    """Handle one chat turn: detect the intent, collect and validate its
    variables across turns, execute the intent, and fall back to the LLM
    when no valid intent is found.

    Args:
        msg: Incoming message; only ``msg.user_input`` is read.
        request: FastAPI request object (unused in this handler's visible code).
        app: Application instance (unused in this handler's visible code).
        service_config: Supplies per-project LLM config, intent definitions and
            data formats (``get_project_llm_config``, ``get_project_intents``,
            ``data_formats``).
        session: Mutable conversation state; this handler reads/writes
            ``last_intent``, ``awaiting_variable`` and ``variables``.
        llm_model: Loaded model wrapper used for fallback generation.

    Returns:
        A dict payload (``{"reply": ...}``, ``{"response": ...}`` or
        ``{"error": ...}``), or a ``JSONResponse`` with status 500 on an
        unexpected exception.
    """
    try:
        user_input = msg.user_input.strip()
        project_name = session.project_name
        project_config = service_config.get_project_llm_config(project_name)
        project_intents = service_config.get_project_intents(project_name)

        # Model must be loaded before anything else can proceed.
        if llm_model.model is None or llm_model.tokenizer is None:
            return {"error": f"{project_name} için model yüklenmedi."}

        detected_intent, intent_conf = await detect_intent(user_input)
        log(f"🎯 Intent tespit edildi: {detected_intent}, Confidence: {intent_conf:.2f}")

        current_intent = session.last_intent
        awaiting_variable = session.awaiting_variable

        # Topic change: while waiting for a variable, a confident and
        # *different* intent resets the collected context.
        # NOTE(review): "treshold" is the key as spelled in the project
        # config — kept as-is; renaming it here would break config lookups.
        if (
            awaiting_variable and
            detected_intent and
            detected_intent != current_intent and
            intent_conf > project_config["intent_confidence_treshold"]
        ):
            log("🧹 Konu değişikliği algılandı → context sıfırlanıyor")
            session.awaiting_variable = None
            session.variables = {}
            session.last_intent = detected_intent
            current_intent = detected_intent

        intent_is_valid = (
            detected_intent and
            intent_conf > project_config["intent_confidence_treshold"] and
            any(i["name"] == detected_intent for i in project_intents)
        )
        log(f"✅ Intent geçerli mi?: {intent_is_valid}")

        if intent_is_valid:
            session.last_intent = detected_intent
            intent_def = next(i for i in project_intents if i["name"] == detected_intent)
            pattern_list = intent_def.get("variables", [])
            variable_format_map = intent_def.get("variable_formats", {})
            data_formats = service_config.data_formats

            # Extract once; the original called extract_parameters() twice
            # (once in the awaiting branch, once again for the merge).
            extracted = extract_parameters(pattern_list, user_input)

            if awaiting_variable:
                # Complete the variable we explicitly asked for, if present.
                for p in extracted:
                    if p["key"] == awaiting_variable:
                        session.variables[awaiting_variable] = p["value"]
                        session.awaiting_variable = None
                        log(f"✅ Awaiting parametre tamamlandı: {awaiting_variable} = {p['value']}")
                        break

            # Merge everything extracted this turn into the session.
            session.variables.update({p["key"]: p["value"] for p in extracted})

            is_valid, validation_errors = validate_variable_formats(session.variables, variable_format_map, data_formats)
            log(f"📛 Validasyon hataları: {validation_errors}")
            if not is_valid:
                # Re-ask for the first invalid variable.
                session.awaiting_variable = next(iter(validation_errors))
                return {"response": next(iter(validation_errors.values()))}

            expected_vars = list(variable_format_map.keys())
            missing_vars = [v for v in expected_vars if v not in session.variables]
            log(f"📌 Beklenen parametreler: {expected_vars}, Eksik: {missing_vars}")
            if missing_vars:
                # Ask for the first missing variable and wait for the next turn.
                session.awaiting_variable = missing_vars[0]
                return {"response": f"Lütfen {missing_vars[0]} bilgisini belirtir misiniz?"}

            log("🚀 execute_intent() çağrılıyor...")
            result = execute_intent(
                detected_intent,
                user_input,
                session.__dict__,
                {i["name"]: i for i in project_intents},
                data_formats
            )
            if "reply" in result:
                return {"reply": result["reply"]}
            elif "errors" in result:
                return {"response": next(iter(result["errors"].values()))}
            else:
                return {"response": random.choice(project_config["fallback_answers"])}

        # No valid intent → clear any pending collection state and let the
        # LLM answer; low-confidence generations fall back to canned answers.
        log("🤖 execute_intent çağrılmadı → LLM fallback devrede")
        session.awaiting_variable = None
        session.variables = {}
        response, response_conf = await llm_model.generate_response(user_input, project_config)
        if response_conf is not None and response_conf < project_config["llm_confidence_treshold"]:
            return {"response": random.choice(project_config["fallback_answers"])}
        return {"response": response}
    except Exception as e:
        # Boundary handler: log the traceback, surface a 500 to the caller.
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)