File size: 5,727 Bytes
a252004
f24aded
dd6bafd
447c15c
 
f24aded
 
 
447c15c
f24aded
 
 
 
 
dd6bafd
447c15c
0b05b65
dd6bafd
f24aded
 
dd6bafd
f24aded
a252004
f24aded
 
 
 
 
 
 
a252004
f24aded
447c15c
f24aded
447c15c
f24aded
447c15c
f24aded
 
 
 
447c15c
 
f24aded
447c15c
f24aded
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
"""
Flare – Chat Handler (state-machine + header session)
"""

from __future__ import annotations
import json, re
from typing import Dict, List
from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel
from config_provider import ConfigProvider, VersionConfig, IntentConfig, ParameterConfig
from prompt_builder import (
    build_detection_prompt, build_param_extract_prompt,
    build_missing_param_prompt, build_api_humanize_prompt)
from validation_engine import validate
from api_executor import call_api
from session import session_store, Session
from utils import log

# Config snapshot taken once at import time; every handler below shares it.
cfg=ConfigProvider.get()
router=APIRouter()

# User phrases that abort the current flow (Turkish + English "cancel" words).
_cancel_words=re.compile(r"\b(vazgeç|iptal|boşver|cancel)\b",re.I)

# -------- helper ----------
def detect_intent(version: VersionConfig, text: str) -> IntentConfig | None:
    """Return the first intent whose name occurs in *text* (case-insensitive).

    A detection prompt is still built per candidate so the placeholder
    match below can later be swapped for a real LLM classification call.
    Returns None when no intent matches.
    """
    lowered = text.lower()
    for candidate in version.intents:
        _prompt = build_detection_prompt(version.general_prompt, candidate)
        # placeholder: ask_llm(_prompt, text, "classification")
        if candidate.name.lower() in lowered:
            return candidate
    return None

# --------- endpoints -------
class SessionStartRequest(BaseModel):
    """Body of POST /start_session: which project to open a session for."""

    project_name: str
class SessionStartResponse(BaseModel):
    """Response of POST /start_session: the id to echo back in X-Session-ID."""

    session_id: str

@router.post("/start_session", response_model=SessionStartResponse)
def start_session(body: SessionStartRequest):
    """Create a fresh session for the requested project and return its id."""
    new_session = session_store.create_session(body.project_name)
    return SessionStartResponse(session_id=new_session.session_id)

class ChatRequest(BaseModel):
    """Body of POST /chat: one user utterance."""

    user_input: str
class ChatResponse(BaseModel):
    """Response of POST /chat: the assistant reply plus the session snapshot."""

    reply: str
    session: Dict

@router.post("/chat",response_model=ChatResponse)
def chat(req:Request, body:ChatRequest):
    """Drive one turn of the per-session state machine.

    States progress idle -> await_param -> call_api -> humanize within a
    single request when possible; each terminal branch records the
    assistant turn via s.add_turn and returns the reply plus the raw
    session dict. The session id travels in the X-Session-ID header.

    Raises:
        HTTPException 400: X-Session-ID header missing.
        HTTPException 404: no session with that id.
        HTTPException 500: intent's action has no configured API.
    """
    sid=req.headers.get("X-Session-ID")
    if not sid: raise HTTPException(400,"X-Session-ID header missing")
    s=session_store.get_session(sid)
    if not s: raise HTTPException(404,"Session not found")

    version=_get_live_version(s.project_name)
    user_text=body.user_input
    s.add_turn("user",user_text)

    # quick cancel: any cancel word aborts the current flow immediately
    if _cancel_words.search(user_text):
        s.reset_flow()
        reply="Elbette, başka bir konuda nasıl yardımcı olabilirim?"
        s.add_turn("assistant",reply)
        return ChatResponse(reply=reply,session=s.__dict__)

    # ---- idle state: look for an intent in the user's message
    if s.state=="idle":
        intent=detect_intent(version,user_text)
        if not intent:
            reply="(LLM-sohbet cevabı)"  # ask_llm will be used here (small talk)
            s.add_turn("assistant",reply)
            return ChatResponse(reply=reply,session=s.__dict__)

        s.last_intent=intent.name
        s.state="await_param"
        s.awaiting_parameters=[p.name for p in intent.parameters]
        s.variables={}     # a new intent resets variables from any previous one

    # ---- await_param state: collect the intent's required parameters
    if s.state=="await_param":
        intent=next(i for i in version.intents if i.name==s.last_intent)
        # did the user switch to a different intent mid-flow?
        new_int=detect_intent(version,user_text)
        if new_int and new_int.name!=intent.name:
            log("🔄 new intent overrides current flow")
            s.reset_flow()
            s.last_intent=new_int.name
            # skip straight to call_api when the new intent needs no parameters
            s.state="await_param" if new_int.parameters else "call_api"
            s.awaiting_parameters=[p.name for p in new_int.parameters]
            intent=new_int

        # parameter extraction: a parameter is "found" when its name appears
        # as a substring of the lowercased message (demo placeholder logic)
        for p in intent.parameters:
            if p.name in s.variables: continue
            if p.name in user_text.lower():    # simple demo extraction
                val=user_text
                if not validate(val,p):
                    reply=p.invalid_prompt or f"{p.caption or p.name} değerini doğrulayamadım."
                    s.add_turn("assistant",reply)
                    return ChatResponse(reply=reply,session=s.__dict__)
                s.variables[p.name]=val
                s.awaiting_parameters.remove(p.name)

        if s.awaiting_parameters:
            # give up after two consecutive unanswered prompts
            s.missing_ask_count+=1
            if s.missing_ask_count>=2:
                s.reset_flow()
                reply="Başka bir konuda yardımcı olabilir miyim?"
                s.add_turn("assistant",reply)
                return ChatResponse(reply=reply,session=s.__dict__)

            ask=build_missing_param_prompt(s.awaiting_parameters)
            s.add_turn("assistant",ask)
            return ChatResponse(reply=ask,session=s.__dict__)

        s.state="call_api"

    # ---- call_api state: all parameters collected, invoke the backend API
    if s.state=="call_api":
        intent=next(i for i in version.intents if i.name==s.last_intent)
        api=cfg.get_api(intent.action)
        if not api: raise HTTPException(500,"API not found")

        try:
            resp=call_api(api,s.variables)
        except Exception as e:
            # any backend failure resets the flow and surfaces the intent's error text
            log(f"❌ API error {e}")
            s.reset_flow()
            reply=intent.fallback_error_prompt or "Hata oluştu."
            s.add_turn("assistant",reply)
            return ChatResponse(reply=reply,session=s.__dict__)

        s.api_raw=resp.json()  # kept on the session for debugging
        s.state="humanize"

    # ---- humanize: turn the raw API JSON into a natural-language reply
    if s.state=="humanize":
        intent=next(i for i in version.intents if i.name==s.last_intent)
        api=cfg.get_api(intent.action)
        human_prompt=build_api_humanize_prompt(
            version.general_prompt,
            api.response_prompt or "",
            json.dumps(s.api_raw,ensure_ascii=False,indent=2)
        )
        reply="(LLM-humanize)"   # ask_llm will be used here (human_prompt prepared above)
        s.add_turn("assistant",reply)
        s.reset_flow()
        return ChatResponse(reply=reply,session=s.__dict__)
    # NOTE(review): if s.state ever holds a value outside the four handled
    # states, the function falls through and returns None, which FastAPI
    # rejects against response_model — confirm reset_flow always yields "idle".


def _get_live_version(project_name:str)->VersionConfig:
    """Return the newest published version of an enabled project.

    The newest version is the one with the greatest ``id`` among
    published versions.

    Raises:
        HTTPException 404: project is unknown or disabled.
        HTTPException 500: project exists but has no published version.
    """
    # next() with a default: the original bare next() raised StopIteration,
    # which surfaced as an opaque 500 inside the FastAPI request handler.
    proj=next((p for p in cfg.projects if p.name==project_name and p.enabled),None)
    if proj is None:
        raise HTTPException(404,f"Project '{project_name}' not found or disabled")
    published=[v for v in proj.versions if v.published]
    if not published:
        # the original max() raised ValueError on an empty sequence
        raise HTTPException(500,f"Project '{project_name}' has no published version")
    return max(published,key=lambda x:x.id)