from fastapi import APIRouter, Request
from llm_model import Message
from chat_handler import handle_chat
from core import service_config, session_store, llm_models

router = APIRouter()

@router.post("/start_chat")
def start_chat(request: Request):
    project_name = request.query_params.get("project_name")
    if not project_name:
        return {"error": "project_name parametresi gereklidir."}

    session = session_store.create_session(project_name)
    return {"session_id": session.session_id}

@router.post("/chat")
async def chat_endpoint(msg: Message, request: Request):
    session_id = request.headers.get("X-Session-ID")
    if not session_id:
        return {"error": "Session ID eksik."}

    session = session_store.get_session(session_id)
    if not session:
        return {"error": "Geçersiz veya süresi dolmuş session."}

    project_name = session.project_name
    llm_model = llm_models.get(project_name)
    if llm_model is None:
        return {"error": f"{project_name} için model yüklenmemiş."}

    return await handle_chat(msg, request, None, service_config, session, llm_model)
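
# ---------------------------------------------------------------------------
# Minimal usage sketch (illustration only, not part of the original module).
# It assumes the FastAPI app that includes this router is running locally at
# http://localhost:8000 with no route prefix, and that Message is a Pydantic
# model with a single "content" field; both are assumptions, not facts taken
# from this repository.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import json
    import urllib.request

    base_url = "http://localhost:8000"  # assumed host and port

    # Open a session for a project; the project name travels as a query parameter.
    start_req = urllib.request.Request(
        f"{base_url}/start_chat?project_name=demo", data=b"", method="POST"
    )
    with urllib.request.urlopen(start_req) as resp:
        session_id = json.loads(resp.read())["session_id"]

    # Send a chat message, identifying the session via the X-Session-ID header.
    chat_req = urllib.request.Request(
        f"{base_url}/chat",
        data=json.dumps({"content": "Hello"}).encode("utf-8"),
        headers={"Content-Type": "application/json", "X-Session-ID": session_id},
        method="POST",
    )
    with urllib.request.urlopen(chat_req) as resp:
        print(json.loads(resp.read()))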