from fastapi import Request
from fastapi.responses import JSONResponse
import traceback
import random
from llm_model import Message, LLMModel
from intent_api import execute_intent
from intent_utils import validate_variable_formats
from parse_llm_blocks import parse_llm_blocks
from log import log

async def handle_chat(msg: Message, request: Request, app, service_config, session, llm_model: LLMModel):
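    """Handle a single chat turn.

    Appends the user message to the session history, calls the LLM with the
    project-specific config and system prompt, parses the structured reply,
    and then either returns plain small talk, asks for missing parameters, or
    validates the collected variables and executes the matched intent.
    """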
    try:
        user_input = msg.user_input.strip()
        log(f"💬 Kullanıcı input'u: '{user_input}'")

        project_name = session.project_name
        project_config = service_config.get_project_llm_config(project_name)
        system_prompt = service_config.system_prompt  # 👉 system_prompt comes from the service config

        # Append the user message to the chat history
        session.chat_history.append({"role": "user", "content": user_input})

        # === LLM call
        llm_response = await llm_model.generate_response_with_messages(session.chat_history, project_config, system_prompt)
        log(f"🤖 LLM response: {llm_response}")

        # === Parse the LLM response into structured blocks
        parsed = parse_llm_blocks(llm_response)
        intent = parsed["intent"]
        params = parsed["params"]
        missing = parsed["missing"]
        action_json = parsed["action_json"]  # parsed but not used further in this handler

        # Append the assistant response to the chat history
        session.chat_history.append({"role": "assistant", "content": llm_response})

        # === No intent → casual small talk; reset any pending intent state
        if intent == "NONE":
            session.awaiting_variable = None
            session.last_intent = None
            session.variables.clear()
            return {"response": llm_response}

        # === An intent was detected
        session.last_intent = intent
        session.variables.update(params)

        # If parameters are still missing, ask the user for the first one
        if missing:
            session.awaiting_variable = missing[0]
            # Turkish prompt: "Could you please provide <missing>?"
            return {"response": f"Lütfen {', '.join(missing)} bilgisini belirtir misiniz?"}

        # === Execute the intent's API call
        intent_definitions = {i["name"]: i for i in service_config.get_project_intents(project_name)}
        data_formats = service_config.data_formats

        # Validate the parameters against their formats (backend safety layer)
        variable_format_map = intent_definitions.get(intent, {}).get("variable_formats", {})
        is_valid, validation_errors = validate_variable_formats(session.variables, variable_format_map, data_formats)

        if not is_valid:
            session.awaiting_variable = list(validation_errors.keys())[0]
            return {"response": list(validation_errors.values())[0]}

        log("🚀 execute_intent() çağrılıyor...")
        result = execute_intent(
            intent,
            user_input,
            session.__dict__,
            intent_definitions,
            data_formats,
            project_name,
            service_config
        )

        if "reply" in result:
            return {"reply": result["reply"]}
        elif "errors" in result:
            return {"response": list(result["errors"].values())[0]}
        else:
            return {"response": random.choice(project_config["fallback_answers"])}

    except Exception as e:
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)
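
# --- Hedged usage sketch (not part of the original handler) ---
# handle_chat is meant to be called from a FastAPI route. The wiring below is
# an illustrative assumption: `get_session`, `service_config`, and `llm_model`
# stand in for however the application actually stores and retrieves its state.
#
# @app.post("/chat")
# async def chat_endpoint(msg: Message, request: Request):
#     session = get_session(request)  # hypothetical per-user session lookup
#     return await handle_chat(msg, request, app, service_config, session, llm_model)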