ciyidogan committed on
Commit
fdebccf
·
verified ·
1 Parent(s): b4a8f29

Update chat_handler.py

Browse files
Files changed (1) hide show
  1. chat_handler.py +5 -31
chat_handler.py CHANGED
@@ -1,10 +1,8 @@
1
  from fastapi import Request
2
  from fastapi.responses import JSONResponse
3
  import traceback
4
- import random
5
  from llm_model import Message, LLMModel
6
  from intent_api import execute_intent
7
- from intent_utils import validate_variable_formats
8
  from parse_llm_blocks import parse_llm_blocks
9
  from log import log
10
 
@@ -49,18 +47,7 @@ async def handle_chat(msg: Message, request: Request, app, service_config, sessi
49
  # Eksik parametre varsa → kullanıcıdan istenecek
50
  if missing:
51
  session.awaiting_variable = missing[0]
52
- return {"response": f"Lütfen {', '.join(missing)} bilgisini belirtir misiniz?"}
53
-
54
- # Parametreleri validasyonla kontrol et
55
- intent_definitions = {i["name"]: i for i in service_config.get_project_intents(project_name)}
56
- data_formats = service_config.data_formats
57
-
58
- variable_format_map = intent_definitions.get(intent, {}).get("variable_formats", {})
59
- is_valid, validation_errors = validate_variable_formats(session.variables, variable_format_map, data_formats)
60
-
61
- if not is_valid:
62
- session.awaiting_variable = list(validation_errors.keys())[0]
63
- return {"response": list(validation_errors.values())[0]}
64
 
65
  # === API çağrısı yap
66
  log("🚀 execute_intent() çağrılıyor...")
@@ -68,29 +55,16 @@ async def handle_chat(msg: Message, request: Request, app, service_config, sessi
68
  intent,
69
  user_input,
70
  session.__dict__,
71
- intent_definitions,
72
- data_formats,
73
  project_name,
74
  service_config
75
  )
76
 
77
  if "reply" in result:
78
- api_reply = result["reply"]
79
- # API cevabını modele geri verip insani cevap iste
80
- response_prompt = f"intent:{intent} response:{api_reply}"
81
- log(f"🤖 API cevabı modele gönderiliyor: {response_prompt}")
82
-
83
- session.chat_history.append({"role": "user", "content": response_prompt})
84
- final_response = await llm_model.generate_response_with_messages(session.chat_history, project_config, system_prompt)
85
- log(f"🤖 Final insani cevap: {final_response}")
86
-
87
- session.chat_history.append({"role": "assistant", "content": final_response})
88
- return {"response": final_response}
89
-
90
- elif "errors" in result:
91
- return {"response": list(result["errors"].values())[0]}
92
  else:
93
- return {"response": random.choice(project_config["fallback_answers"])}
94
 
95
  except Exception as e:
96
  traceback.print_exc()
 
1
  from fastapi import Request
2
  from fastapi.responses import JSONResponse
3
  import traceback
 
4
  from llm_model import Message, LLMModel
5
  from intent_api import execute_intent
 
6
  from parse_llm_blocks import parse_llm_blocks
7
  from log import log
8
 
 
47
  # Eksik parametre varsa → kullanıcıdan istenecek
48
  if missing:
49
  session.awaiting_variable = missing[0]
50
+ return {"response": f"Lütfen {', '.join(missing)} bilgisini belirtir misin?"}
 
 
 
 
 
 
 
 
 
 
 
51
 
52
  # === API çağrısı yap
53
  log("🚀 execute_intent() çağrılıyor...")
 
55
  intent,
56
  user_input,
57
  session.__dict__,
 
 
58
  project_name,
59
  service_config
60
  )
61
 
62
  if "reply" in result:
63
+ return {"response": result["reply"]}
64
+ elif "fallback" in result:
65
+ return {"response": result["fallback"]}
 
 
 
 
 
 
 
 
 
 
 
66
  else:
67
+ return {"response": "Beklenmeyen bir hata oluştu."}
68
 
69
  except Exception as e:
70
  traceback.print_exc()