Upload 5 files
- controllers/auth_controller.py +16 -0
- controllers/config_controller.py +18 -0
- controllers/__init__.py +1 -0
- controllers/project_controller.py +107 -0
- controllers/test_controller.py +9 -0
controllers/auth_controller.py
ADDED
@@ -0,0 +1,16 @@
+from fastapi import APIRouter, Request
+from log import log
+
+router = APIRouter()
+
+@router.post("/login")
+async def login(request: Request):
+    data = await request.json()
+    username = data.get("username")
+    password = data.get("password")
+    if not username or not password:
+        return {"error": "Username and password required."}
+
+    # TODO: add hash+salt password verification here (simple log for now)
+    log(f"🔐 Login attempt for user: {username}")
+    return {"message": f"User {username} logged in (mock)."}
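
The TODO in the login handler calls for hash+salt password verification. A minimal sketch of what that check could look like using only Python's standard library; the helper names, iteration count, and storage of the (salt, digest) pair are illustrative assumptions, not part of this upload:

import hashlib
import hmac
import secrets
from typing import Optional, Tuple

def hash_password(password: str, salt: Optional[bytes] = None) -> Tuple[bytes, bytes]:
    # Derive a PBKDF2-HMAC-SHA256 digest from the password and a random 16-byte salt.
    salt = salt or secrets.token_bytes(16)
    digest = hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"), salt, 100_000)
    return salt, digest

def verify_password(password: str, salt: bytes, expected_digest: bytes) -> bool:
    # Re-derive the digest with the stored salt and compare in constant time.
    _, digest = hash_password(password, salt)
    return hmac.compare_digest(digest, expected_digest)

The login route would then look up the stored (salt, digest) pair for the username and call verify_password before logging the attempt.
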
controllers/config_controller.py
ADDED
@@ -0,0 +1,18 @@
+from fastapi import APIRouter
+import threading
+from service_config import ServiceConfig
+from log import log
+
+router = APIRouter()
+service_config = ServiceConfig()
+
+@router.post("/reload")
+def reload_config():
+    def background_reload():
+        try:
+            service_config.load()
+            log("✅ Service config reloaded successfully.")
+        except Exception as e:
+            log(f"❌ Error reloading config: {e}")
+    threading.Thread(target=background_reload, daemon=True).start()
+    return {"status": "accepted", "message": "Config reload started in background."}
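
The reload endpoint spawns a daemon thread so the HTTP response can return before the reload finishes. FastAPI's built-in BackgroundTasks offers the same fire-and-forget behavior without managing threads directly; a sketch of that variant (an alternative, not the uploaded implementation):

from fastapi import APIRouter, BackgroundTasks
from service_config import ServiceConfig
from log import log

router = APIRouter()
service_config = ServiceConfig()

def background_reload():
    # Runs after the response has been sent to the client.
    try:
        service_config.load()
        log("✅ Service config reloaded successfully.")
    except Exception as e:
        log(f"❌ Error reloading config: {e}")

@router.post("/reload")
def reload_config(background_tasks: BackgroundTasks):
    background_tasks.add_task(background_reload)
    return {"status": "accepted", "message": "Config reload started in background."}

Both variants return immediately; the difference is only in who owns the worker (a daemon thread versus Starlette's background task runner).
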
controllers/__init__.py
ADDED
@@ -0,0 +1 @@
+# Empty init file, needed for package imports

controllers/project_controller.py
ADDED
@@ -0,0 +1,107 @@
+from fastapi import APIRouter, Request, Header
+from session import SessionStore
+from service_config import ServiceConfig
+from prompt_engine import PromptEngine
+from llm_connector import LLMConnector
+from api_connector import APIConnector
+from validation_engine import ValidationEngine
+from log import log
+import traceback
+
+router = APIRouter()
+
+# Core components
+service_config = ServiceConfig()
+service_config.load()
+session_store = SessionStore()
+prompt_engine = PromptEngine(service_config)
+llm_connector = LLMConnector(service_config)
+api_connector = APIConnector(service_config)
+validation_engine = ValidationEngine()
+
+@router.post("/start_chat")
+def start_chat(request: Request):
+    project_name = request.query_params.get("project_name")
+    if not project_name:
+        return {"error": "Missing project_name parameter."}
+    session = session_store.create_session(project_name)
+    return {"session_id": session.session_id}
+
+@router.post("/chat")
+async def chat(request: Request, x_session_id: str = Header(None)):
+    if not x_session_id:
+        return {"error": "Missing X-Session-ID header."}
+    session = session_store.get_session(x_session_id)
+    if not session:
+        return {"error": "Invalid or expired session."}
+    try:
+        body = await request.json()
+        user_input = body.get("user_input", "").strip()
+        if not user_input:
+            return {"error": "Empty user input."}
+        session.chat_history.append({"role": "user", "content": user_input})
+        project_name = session.project_name
+
+        if session.state == "intent_detection":
+            prompt = prompt_engine.build_intent_prompt(project_name)
+            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
+            if llm_response is None:
+                return {"error": "Failed to get intent detection result."}
+            intent = llm_response.get("intent")
+            params = llm_response.get("params", {})
+            missing = llm_response.get("missing", [])
+            session.last_intent = intent
+            session.variables.update(params)
+            session.awaiting_parameters = missing
+            if missing:
+                session.state = "parameter_extraction"
+                return {"response": f"Please provide: {', '.join(missing)}"}
+            session.state = "validation"
+
+        if session.state == "parameter_extraction":
+            prompt = prompt_engine.build_parameter_prompt(project_name, session.last_intent, session.awaiting_parameters)
+            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
+            if llm_response is None:
+                return {"error": "Failed to extract parameters."}
+            params = llm_response.get("params", {})
+            missing = llm_response.get("missing", [])
+            session.variables.update(params)
+            session.awaiting_parameters = missing
+            if missing:
+                return {"response": f"Please provide: {', '.join(missing)}"}
+            session.state = "validation"
+
+        if session.state == "validation":
+            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
+            if not intent_def:
+                return {"error": f"Intent definition not found: {session.last_intent}"}
+            is_valid, errors = validation_engine.validate_parameters(intent_def, session.variables)
+            if not is_valid:
+                return {"response": " ".join(errors)}
+            session.state = "api_call"
+
+        if session.state == "api_call":
+            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
+            api_response = api_connector.call_api(intent_def, session)
+            if "fallback" in api_response:
+                return {"response": api_response["fallback"]}
+            session.state = "humanization"
+            session.variables["api_result"] = api_response
+
+        if session.state == "humanization":
+            prompt = prompt_engine.build_humanization_prompt(project_name, session.last_intent)
+            chat_history = [{"role": "system", "content": str(session.variables["api_result"])}]
+            humanized_response = llm_connector.call_spark(project_name, prompt, chat_history)
+            if humanized_response is None:
+                return {"error": "Failed to humanize response."}
+            session.chat_history.append({"role": "assistant", "content": humanized_response.get("answer")})
+            session.state = "intent_detection"
+            session.last_intent = None
+            session.variables = {}
+            session.awaiting_parameters = []
+            return {"response": humanized_response.get("answer")}
+
+    except Exception as e:
+        log(f"❌ Error in chat: {e}")
+        traceback.print_exc()
+        return {"error": str(e)}
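
The /chat handler above is a small state machine (intent_detection -> parameter_extraction -> validation -> api_call -> humanization) driven by session.state. A typical client round trip against these two endpoints could look like the following; the base URL, project name, and user input are assumptions for illustration, and the routers are assumed to be mounted without a prefix (see the wiring sketch at the end of this upload):

import requests

BASE = "http://localhost:8000"  # assumed host/port

# 1. Open a session for a project; the session id comes back in the body.
resp = requests.post(f"{BASE}/start_chat", params={"project_name": "demo_project"})
session_id = resp.json()["session_id"]

# 2. Send user turns; the session id travels in the X-Session-ID header.
resp = requests.post(
    f"{BASE}/chat",
    headers={"X-Session-ID": session_id},
    json={"user_input": "I want to check the weather in Istanbul"},
)
print(resp.json())  # either a follow-up question ("Please provide: ...") or the humanized answer

Each turn either asks for missing parameters or runs the full pipeline through the API call and humanization steps, then resets the session back to intent_detection.
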
controllers/test_controller.py
ADDED
@@ -0,0 +1,9 @@
+from fastapi import APIRouter
+from log import log
+
+router = APIRouter()
+
+@router.post("/run_tests")
+def run_tests():
+    log("🚦 /run_tests endpoint called. (Test runner needs to be implemented.)")
+    return {"status": "not_implemented", "message": "Test runner is not yet implemented."}
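
The four controllers each expose an APIRouter, so a host application has to mount them. A minimal sketch of how a main module could wire them together; the module name, lack of route prefixes, and uvicorn command are assumptions, since this commit only adds the controllers package:

from fastapi import FastAPI

from controllers.auth_controller import router as auth_router
from controllers.config_controller import router as config_router
from controllers.project_controller import router as project_router
from controllers.test_controller import router as test_router

app = FastAPI()

# Prefixes are omitted here because the upload does not show how the routers are mounted.
app.include_router(auth_router)
app.include_router(config_router)
app.include_router(project_router)
app.include_router(test_router)

# Run with: uvicorn main:app --host 0.0.0.0 --port 8000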