ciyidogan committed
Commit 7907a6b · verified · 1 Parent(s): edfa5a2

Update chat_handler.py

Files changed (1)
chat_handler.py +13 -7
chat_handler.py CHANGED
@@ -1,26 +1,32 @@
+import traceback
+import requests
 from fastapi import Request
 from fastapi.responses import JSONResponse
-import traceback
-from llm_model import Message, LLMModel
 from intent_api import execute_intent
 from parse_llm_blocks import parse_llm_blocks
 from log import log
+from llm_request import request_spark_model  # ✅ new util
 
-async def handle_chat(msg: Message, request: Request, app, service_config, session, llm_model: LLMModel):
+async def handle_chat(msg, request: Request, app, service_config, session):
     try:
         user_input = msg.user_input.strip()
         log(f"💬 User input: '{user_input}'")
 
         project_name = session.project_name
-        project_config = service_config.get_project_llm_config(project_name)
         system_prompt = service_config.system_prompt
 
         # Append the user message to the chat history
         session.chat_history.append({"role": "user", "content": user_input})
 
-        # === LLM call
-        llm_response = await llm_model.generate_response_with_messages(session.chat_history, project_config, system_prompt)
-        log(f"🤖 LLM response: {llm_response}")
+        # === LLM call to the Spark microservice
+        llm_response = request_spark_model(
+            service_config.llm_inference_service_url,
+            project_name,
+            user_input,
+            system_prompt,
+            session.chat_history
+        )
+        log(f"🤖 Spark response: {llm_response}")
 
         # === Parse the LLM response
         parsed = parse_llm_blocks(llm_response)
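
The helper imported above, llm_request.request_spark_model, is not part of this commit, so its contract is not visible in the diff. The sketch below is only an illustration of what such a helper could look like, based on what the handler passes in (the llm_inference_service_url from service_config, the project name, the raw user input, the system prompt, and the chat history) and on the new import of requests; the endpoint path, payload keys, timeout, and response field are all assumptions. Note that the handler calls it without await, so it is assumed to be a plain synchronous function.

import requests

def request_spark_model(spark_url, project_name, user_input, system_prompt, chat_history):
    """Send a generation request to the Spark LLM inference microservice.

    The endpoint path, payload keys, and response shape here are assumptions;
    the real contract lives in llm_request.py, which is not shown in this diff.
    """
    payload = {
        "project_name": project_name,
        "user_input": user_input,
        "system_prompt": system_prompt,
        "chat_history": chat_history,
    }
    # Hypothetical endpoint: POST <spark_url>/generate with a JSON body.
    response = requests.post(f"{spark_url.rstrip('/')}/generate", json=payload, timeout=60)
    response.raise_for_status()
    # Hypothetical response shape: {"model_answer": "..."}.
    return response.json().get("model_answer", "")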