from fastapi import Request
from fastapi.responses import JSONResponse
import traceback
import random
from intent import extract_parameters, validate_variable_formats, detect_intent
from intent_api import execute_intent
from log import log
from llm_model import Message, LLMModel

async def handle_chat(msg: Message, request: Request, app, service_config, session, llm_model: LLMModel):
    try:
        user_input = msg.user_input.strip()
        project_name = session.project_name
        project_config = service_config.get_project_llm_config(project_name)
        project_intents = service_config.get_project_intents(project_name)

        # The project's model and tokenizer must both be loaded before answering.
        if llm_model.model is None or llm_model.tokenizer is None:
            return {"error": f"{project_name} için model yüklenmedi."}

        # Classify the user's message into one of the project's intents.
        detected_intent, intent_conf = await detect_intent(user_input)
        log(f"🎯 Intent tespit edildi: {detected_intent}, Confidence: {intent_conf:.2f}")

        current_intent = session.last_intent
        awaiting_variable = session.awaiting_variable

        # Topic change: while waiting for a missing variable, a confidently
        # detected different intent resets the collected context.
        if (
            awaiting_variable and
            detected_intent and
            detected_intent != current_intent and
            intent_conf > project_config["intent_confidence_treshold"]
        ):
            log("🧹 Konu değişikliği algılandı → context sıfırlanıyor")
            session.awaiting_variable = None
            session.variables = {}
            session.last_intent = detected_intent
            current_intent = detected_intent

        # An intent is acted on only if it was detected, clears the confidence
        # threshold and is defined for this project.
        intent_is_valid = (
            detected_intent and
            intent_conf > project_config["intent_confidence_treshold"] and
            any(i["name"] == detected_intent for i in project_intents)
        )
        log(f"✅ Intent geçerli mi?: {intent_is_valid}")

        if intent_is_valid:
            session.last_intent = detected_intent
            intent_def = next(i for i in project_intents if i["name"] == detected_intent)
            pattern_list = intent_def.get("variables", [])
            variable_format_map = intent_def.get("variable_formats", {})
            data_formats = service_config.data_formats

            # Extract variables from the message once; if a specific variable
            # was awaited and has now arrived, the wait is over.
            extracted = extract_parameters(pattern_list, user_input)
            if awaiting_variable:
                for p in extracted:
                    if p["key"] == awaiting_variable:
                        session.variables[awaiting_variable] = p["value"]
                        session.awaiting_variable = None
                        log(f"✅ Awaiting parametre tamamlandı: {awaiting_variable} = {p['value']}")
                        break

            # Merge everything that was extracted into the session context.
            variables = {p["key"]: p["value"] for p in extracted}
            session.variables.update(variables)

            # Validate the collected variables against the project's data formats.
            is_valid, validation_errors = validate_variable_formats(session.variables, variable_format_map, data_formats)
            log(f"📛 Validasyon hataları: {validation_errors}")

            if not is_valid:
                session.awaiting_variable = list(validation_errors.keys())[0]
                return {"response": list(validation_errors.values())[0]}

            # If any expected variable is still missing, ask for the first one.
            expected_vars = list(variable_format_map.keys())
            missing_vars = [v for v in expected_vars if v not in session.variables]
            log(f"📌 Beklenen parametreler: {expected_vars}, Eksik: {missing_vars}")

            if missing_vars:
                session.awaiting_variable = missing_vars[0]
                return {"response": f"Lütfen {missing_vars[0]} bilgisini belirtir misiniz?"}

            log("🚀 execute_intent() çağrılıyor...")
            result = execute_intent(
                detected_intent,
                user_input,
                session.__dict__,
                {i["name"]: i for i in project_intents},
                data_formats
            )
            if "reply" in result:
                return {"reply": result["reply"]}
            elif "errors" in result:
                return {"response": list(result["errors"].values())[0]}
            else:
                return {"response": random.choice(project_config["fallback_answers"])}

        log("🤖 execute_intent çağrılmadı → LLM fallback devrede")
        session.awaiting_variable = None
        session.variables = {}
        response, response_conf = await llm_model.generate_response(user_input, project_config)
        # Low-confidence generations are replaced with a canned fallback answer.
        if response_conf is not None and response_conf < project_config["llm_confidence_treshold"]:
            return {"response": random.choice(project_config["fallback_answers"])}
        return {"response": response}

    except Exception as e:
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)
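

# --- Usage sketch (illustrative, not part of the original handler) ---
# A minimal example of how handle_chat could be exposed as a FastAPI endpoint.
# The route path "/chat" and the helpers get_or_create_session(),
# load_service_config() and load_llm_model() below are hypothetical
# placeholders; the real application wiring may differ.
#
#   from fastapi import FastAPI
#
#   app = FastAPI()
#   service_config = load_service_config()        # hypothetical config loader
#   llm_model = load_llm_model(service_config)    # hypothetical model loader
#
#   @app.post("/chat")
#   async def chat_endpoint(msg: Message, request: Request):
#       session = get_or_create_session(request)  # hypothetical session lookup
#       return await handle_chat(msg, request, app, service_config, session, llm_model)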