# NOTE(review): removed "Spaces: / Running / Running" — Hugging Face Spaces
# status-banner text accidentally captured during extraction; not part of
# this module's code.
""" | |
Flare – Chat Handler (state-machine + header session) | |
""" | |
from __future__ import annotations

import json
import re
from typing import Dict, List

from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel

from api_executor import call_api
from config_provider import ConfigProvider, VersionConfig, IntentConfig, ParameterConfig
from prompt_builder import (
    build_detection_prompt, build_param_extract_prompt,
    build_missing_param_prompt, build_api_humanize_prompt)
from session import session_store, Session
from utils import log
from validation_engine import validate

# Module-level singletons: config snapshot and the router the endpoints hang off.
cfg = ConfigProvider.get()
router = APIRouter()

# Words that abort the current flow at any point (Turkish + English "cancel").
_cancel_words = re.compile(r"\b(vazgeç|iptal|boşver|cancel)\b", re.I)
# -------- helper ----------
def detect_intent(version: VersionConfig, text: str) -> IntentConfig | None:
    """Return the first configured intent whose name occurs in *text*, else None.

    NOTE(review): this is a placeholder classifier — the built *prompt* is
    currently unused; the real implementation should send it to the LLM
    (ask_llm(prompt, text, "classification")) instead of substring matching.
    """
    lowered = text.lower()  # hoisted: don't re-lower the utterance per intent
    for it in version.intents:
        prompt = build_detection_prompt(version.general_prompt, it)
        if it.name.lower() in lowered:  # placeholder: ask_llm(prompt, text, "classification")
            return it
    return None
# --------- endpoints -------
class SessionStartRequest(BaseModel):
    """Request body for session creation: the project to open a chat for."""
    project_name: str
class SessionStartResponse(BaseModel):
    """Response carrying the new session id (sent back via X-Session-ID)."""
    session_id: str
def start_session(body: SessionStartRequest) -> SessionStartResponse:
    """Create a new chat session for *body.project_name* and return its id.

    NOTE(review): no @router.post decorator is visible on this endpoint —
    confirm the route registration was not lost when this file was extracted.
    """
    session = session_store.create_session(body.project_name)
    return SessionStartResponse(session_id=session.session_id)
class ChatRequest(BaseModel):
    """Request body for one chat turn: the raw user utterance."""
    user_input: str
class ChatResponse(BaseModel):
    """Assistant reply plus a raw dump of the session state (debug aid)."""
    reply: str
    session: Dict
def _intent_of(version: VersionConfig, name: str) -> IntentConfig:
    """Resolve an intent name stored on the session back to its config.

    Raises HTTPException(500) instead of a bare StopIteration when the
    session references an intent that no longer exists in the live version.
    """
    intent = next((i for i in version.intents if i.name == name), None)
    if intent is None:
        raise HTTPException(500, "Intent not found")
    return intent


def chat(req: Request, body: ChatRequest) -> ChatResponse:
    """Run one turn of the conversation state machine.

    The session is identified by the X-Session-ID header. States fall
    through within a single request: idle -> await_param -> call_api ->
    humanize, each block returning early when it needs user input.

    NOTE(review): no @router.post decorator is visible on this endpoint —
    confirm the route registration was not lost when this file was extracted.
    """
    sid = req.headers.get("X-Session-ID")
    if not sid:
        raise HTTPException(400, "X-Session-ID header missing")
    s = session_store.get_session(sid)
    if not s:
        raise HTTPException(404, "Session not found")
    version = _get_live_version(s.project_name)
    user_text = body.user_input
    s.add_turn("user", user_text)

    # Quick cancel: any cancel word aborts the current flow immediately.
    if _cancel_words.search(user_text):
        s.reset_flow()
        reply = "Elbette, başka bir konuda nasıl yardımcı olabilirim?"
        s.add_turn("assistant", reply)
        return ChatResponse(reply=reply, session=s.__dict__)

    # ---- idle state: look for an intent
    if s.state == "idle":
        intent = detect_intent(version, user_text)
        if not intent:
            reply = "(LLM-sohbet cevabı)"  # TODO: replace placeholder with ask_llm
            s.add_turn("assistant", reply)
            return ChatResponse(reply=reply, session=s.__dict__)
        s.last_intent = intent.name
        s.state = "await_param"
        s.awaiting_parameters = [p.name for p in intent.parameters]
        s.variables = {}  # a new intent invalidates previously collected values

    # ---- await_param state
    if s.state == "await_param":
        intent = _intent_of(version, s.last_intent)
        # Did the user switch to a different intent mid-flow?
        new_int = detect_intent(version, user_text)
        if new_int and new_int.name != intent.name:
            log("🔄 new intent overrides current flow")
            s.reset_flow()
            s.last_intent = new_int.name
            s.state = "await_param" if new_int.parameters else "call_api"
            s.awaiting_parameters = [p.name for p in new_int.parameters]
            intent = new_int
        # Demo extraction: the whole utterance becomes the value when the
        # parameter name is mentioned, compared case-insensitively.
        for p in intent.parameters:
            if p.name in s.variables:
                continue
            # FIX: was `p.name in user_text.lower()` — a mixed-case
            # parameter name could never match the lowercased utterance.
            if p.name.lower() in user_text.lower():
                val = user_text
                if not validate(val, p):
                    reply = p.invalid_prompt or f"{p.caption or p.name} değerini doğrulayamadım."
                    s.add_turn("assistant", reply)
                    return ChatResponse(reply=reply, session=s.__dict__)
                s.variables[p.name] = val
                s.awaiting_parameters.remove(p.name)
        if s.awaiting_parameters:
            s.missing_ask_count += 1
            # Give up after asking twice for the same missing parameters.
            if s.missing_ask_count >= 2:
                s.reset_flow()
                reply = "Başka bir konuda yardımcı olabilir miyim?"
                s.add_turn("assistant", reply)
                return ChatResponse(reply=reply, session=s.__dict__)
            ask = build_missing_param_prompt(s.awaiting_parameters)
            s.add_turn("assistant", ask)
            return ChatResponse(reply=ask, session=s.__dict__)
        s.state = "call_api"

    # ---- call_api state
    if s.state == "call_api":
        intent = _intent_of(version, s.last_intent)
        api = cfg.get_api(intent.action)
        if not api:
            raise HTTPException(500, "API not found")
        try:
            resp = call_api(api, s.variables)
        except Exception as e:
            log(f"❌ API error {e}")
            s.reset_flow()
            reply = intent.fallback_error_prompt or "Hata oluştu."
            s.add_turn("assistant", reply)
            return ChatResponse(reply=reply, session=s.__dict__)
        s.api_raw = resp.json()  # kept on the session for debugging
        s.state = "humanize"

    # ---- humanize state
    if s.state == "humanize":
        intent = _intent_of(version, s.last_intent)
        api = cfg.get_api(intent.action)
        human_prompt = build_api_humanize_prompt(
            version.general_prompt,
            api.response_prompt or "",
            json.dumps(s.api_raw, ensure_ascii=False, indent=2)
        )
        reply = "(LLM-humanize)"  # TODO: replace placeholder with ask_llm(human_prompt)
        s.add_turn("assistant", reply)
        s.reset_flow()
        return ChatResponse(reply=reply, session=s.__dict__)
def _get_live_version(project_name: str) -> VersionConfig:
    """Return the highest-id published version of the named, enabled project.

    Raises HTTPException(404) instead of leaking a bare StopIteration /
    ValueError when the project is missing/disabled or has no published
    version.
    """
    proj = next((p for p in cfg.projects if p.name == project_name and p.enabled), None)
    if proj is None:
        raise HTTPException(404, "Project not found or disabled")
    published = [v for v in proj.versions if v.published]
    if not published:
        raise HTTPException(404, "No published version for project")
    return max(published, key=lambda v: v.id)