"""Flare – Chat / Intent Orchestration Endpoints
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
• POST /start_session → returns a new session_id
• POST /chat → intent → param → API → humanize pipeline

The LLM integration is a stub for now.
"""
from __future__ import annotations
import json
from typing import Dict, Optional
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from config_provider import (
    ConfigProvider,
    IntentConfig,
    ParameterConfig,
    VersionConfig,
)
from prompt_builder import (
    build_intent_prompt,
    build_param_prompt,
    build_api_humanize_prompt,
)
from api_executor import call_api
from session import session_store, Session
from utils import log
router = APIRouter()
cfg = ConfigProvider.get()
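# Shape this module expects from the config (inferred from the attribute
# access below; the ConfigProvider models define the authoritative fields):
#   cfg.projects[i]: .name, .versions
#   version:         .published, .general_prompt, .intents
#   intent:          .name, .parameters, .action, .fallback_error_prompt
#   parameter:       .name, .required, .invalid_prompt
#   cfg.apis[action]: .response_prompt (plus whatever call_api consumes)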
# ---------------- Pydantic bodies ----------------
class SessionStartRequest(BaseModel):
    project_name: str


class SessionStartResponse(BaseModel):
    session_id: str


class ChatRequest(BaseModel):
    session_id: str
    user_input: str


class ChatResponse(BaseModel):
    reply: str
    session: Dict
# --------------- TEMPORARY LLM stub ---------------
def ask_llm(prompt: str, user_input: str, mode: str = "text"):
    log(f"🤖 [STUB ask_llm] mode={mode} prompt_len={len(prompt)}")
    if mode == "classification":
        return ""  # behave as if no intent was detected
    if mode == "json":
        return {}
    return "This is a test response (LLM stub)."
# ---------------- Helper funcs --------------------
def _detect_intent(version: VersionConfig, user_input: str) -> Optional[IntentConfig]:
    for intent in version.intents:
        prompt = build_intent_prompt(version.general_prompt, intent)
        llm_resp = ask_llm(prompt, user_input, mode="classification")
        if intent.name.lower() in llm_resp.lower():
            return intent
    return None

def _extract_params(
    intent: IntentConfig,
    version: VersionConfig,
    user_input: str,
    current_vars: Dict[str, str],
) -> Dict[str, str]:
    missing = [p for p in intent.parameters if p.name not in current_vars]
    if not missing:
        return current_vars
    prompt_base = build_intent_prompt(version.general_prompt, intent)
    prompt = build_param_prompt(prompt_base, missing)
    llm_json = ask_llm(prompt, user_input, mode="json") or {}
    current_vars.update(llm_json)
    return current_vars
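# Illustrative example (names are made up): if an intent needs {"amount",
# "iban"} and the session already holds {"amount": "100"}, only "iban" is
# put into the parameter prompt, and the LLM's JSON reply is merged back
# into the session variables.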

# ---------------- Endpoints -----------------------
@router.post("/start_session", response_model=SessionStartResponse)
def start_session(body: SessionStartRequest):
    session = session_store.create_session(body.project_name)
    return SessionStartResponse(session_id=session.session_id)
@router.post("/chat", response_model=ChatResponse)
def chat(body: ChatRequest):
session: Session | None = session_store.get_session(body.session_id)
if session is None:
raise HTTPException(status_code=404, detail="Session not found")
# ---- project & version seçimi ----
project = next((p for p in cfg.projects if p.name == session.project_name), None)
if project is None:
raise HTTPException(status_code=400, detail="Project not configured")
version = next((v for v in project.versions if v.published), None)
if version is None:
raise HTTPException(status_code=400, detail="No published version")
user_input = body.user_input
session.add_turn("user", user_input)
# 1) Intent detection
intent = _detect_intent(version, user_input)
if intent is None:
reply = ask_llm(version.general_prompt, user_input)
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
# 2) Param extraction
vars_ = _extract_params(intent, version, user_input, session.variables)
# 3) Missing params?
missing = [p for p in intent.parameters if p.required and p.name not in vars_]
if missing:
ask_prompts = "\n".join(
p.invalid_prompt or f"{p.name} bilgisine ihtiyacım var." for p in missing
)
reply = ask_llm(version.general_prompt, ask_prompts)
session.awaiting_parameters = [p.name for p in missing]
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
# 4) API call
api_cfg = cfg.apis[intent.action]
try:
resp = call_api(api_cfg, vars_)
except Exception as e:
log(f"❌ API error: {e}")
reply = intent.fallback_error_prompt or "İşlem sırasında hata oluştu."
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
# 5) Humanize
human_prompt = build_api_humanize_prompt(
version.general_prompt,
api_cfg.response_prompt,
json.dumps(resp.json(), ensure_ascii=False, indent=2),
)
reply = ask_llm(human_prompt, "")
session.add_turn("assistant", reply)
return ChatResponse(reply=reply, session=session.to_dict())
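# For a quick local run, the router can be mounted on a bare app (the module
# name and port here are assumptions):
#
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(router)
#   # uvicorn chat_module:app --reload --port 8000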