from fastapi import FastAPI, Request, Header
from fastapi.responses import JSONResponse
import threading
import traceback

import uvicorn

from log import log
from service_config import ServiceConfig
from session import SessionStore
from prompt_engine import PromptEngine
from llm_connector import LLMConnector
from api_connector import APIConnector
from validation_engine import ValidationEngine
# Initialize core components
service_config = ServiceConfig()
service_config.load()
session_store = SessionStore()
prompt_engine = PromptEngine(service_config)
llm_connector = LLMConnector(service_config)
api_connector = APIConnector(service_config)
validation_engine = ValidationEngine()

app = FastAPI()
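
# Conversation flow overview (derived from the chat handler below). Each
# session moves through a small state machine, one or more states per turn:
#
#   intent_detection -> parameter_extraction -> validation -> api_call
#       -> humanization -> (reset to intent_detection)
#
# The handler uses sequential `if` blocks rather than `elif`, so a turn falls
# through to the next state as soon as the current one succeeds; it only
# returns early when user input is still required or an error occurs.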
# NOTE: the route decorators in this file were missing from the source. The
# paths below are assumptions based on the function names; only /run_tests is
# confirmed by its own log message.
@app.get("/health")
def health_check():
    return {"status": "ok"}
@app.post("/start_chat")
def start_chat(request: Request):
    project_name = request.query_params.get("project_name")
    if not project_name:
        return {"error": "Missing project_name parameter."}
    session = session_store.create_session(project_name)
    return {"session_id": session.session_id}
@app.post("/chat")
async def chat(request: Request, x_session_id: str = Header(None)):
    if not x_session_id:
        return {"error": "Missing X-Session-ID header."}
    session = session_store.get_session(x_session_id)
    if not session:
        return {"error": "Invalid or expired session."}
    try:
        body = await request.json()
        user_input = body.get("user_input", "").strip()
        if not user_input:
            return {"error": "Empty user input."}
        session.chat_history.append({"role": "user", "content": user_input})
        project_name = session.project_name
        # State 1: ask the LLM to detect the user's intent and extract any
        # parameters it can; fall through to validation if nothing is missing.
        if session.state == "intent_detection":
            prompt = prompt_engine.build_intent_prompt(project_name)
            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
            if llm_response is None:
                return {"error": "Failed to get intent detection result."}
            intent = llm_response.get("intent")
            params = llm_response.get("params", {})
            missing = llm_response.get("missing", [])
            session.last_intent = intent
            session.variables.update(params)
            session.awaiting_parameters = missing
            if missing:
                session.state = "parameter_extraction"
                return {"response": f"Please provide: {', '.join(missing)}"}
            session.state = "validation"
        # State 2: keep prompting for the still-missing parameters until none
        # remain, then move on to validation.
        if session.state == "parameter_extraction":
            prompt = prompt_engine.build_parameter_prompt(project_name, session.last_intent, session.awaiting_parameters)
            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
            if llm_response is None:
                return {"error": "Failed to extract parameters."}
            params = llm_response.get("params", {})
            missing = llm_response.get("missing", [])
            session.variables.update(params)
            session.awaiting_parameters = missing
            if missing:
                return {"response": f"Please provide: {', '.join(missing)}"}
            session.state = "validation"
        # State 3: validate the collected parameters against the intent
        # definition from the service config.
        if session.state == "validation":
            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
            if not intent_def:
                return {"error": f"Intent definition not found: {session.last_intent}"}
            is_valid, errors = validation_engine.validate_parameters(intent_def, session.variables)
            if not is_valid:
                return {"response": " ".join(errors)}
            session.state = "api_call"
        # State 4: call the backend API for the resolved intent. A "fallback"
        # key in the response signals a failure message to show the user.
        if session.state == "api_call":
            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
            api_response = api_connector.call_api(intent_def, session)
            if "fallback" in api_response:
                return {"response": api_response["fallback"]}
            session.variables["api_result"] = api_response
            session.state = "humanization"
        # State 5: have the LLM turn the raw API result into a natural-language
        # answer, record it, and reset the session for the next intent.
        if session.state == "humanization":
            prompt = prompt_engine.build_humanization_prompt(project_name, session.last_intent)
            chat_history = [{"role": "system", "content": str(session.variables["api_result"])}]
            humanized_response = llm_connector.call_spark(project_name, prompt, chat_history)
            if humanized_response is None:
                return {"error": "Failed to humanize response."}
            session.chat_history.append({"role": "assistant", "content": humanized_response.get("answer")})
            # Reset the state machine for the next conversation turn.
            session.state = "intent_detection"
            session.last_intent = None
            session.variables = {}
            session.awaiting_parameters = []
            return {"response": humanized_response.get("answer")}
    except Exception as e:
        log(f"❌ Error in chat: {e}")
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)
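
# Example exchange for the state machine above. Shapes are inferred from the
# handler; the intent and field names are purely illustrative and depend on
# the configured project intents:
#
#   POST /chat  {"user_input": "Book a flight to Berlin"}
#   -> {"response": "Please provide: departure_date"}   # parameter still missing
#   POST /chat  {"user_input": "Tomorrow"}
#   -> {"response": "..."}                              # humanized API result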
@app.post("/reload_config")
def reload_config():
    # Reload the service config on a daemon thread so this request returns
    # immediately instead of blocking on I/O.
    def background_reload():
        try:
            service_config.load()
            log("✅ Service config reloaded successfully.")
        except Exception as e:
            log(f"❌ Error reloading config: {e}")
    threading.Thread(target=background_reload, daemon=True).start()
    return {"status": "accepted", "message": "Config reload started in background."}
@app.post("/run_tests")
def run_tests():
    log("📦 /run_tests endpoint called. (Test runner needs to be implemented.)")
    return {"status": "not_implemented", "message": "Test runner is not yet implemented."}
if __name__ == "__main__":
    log("🚀 Starting Flare Intent Service...")
    uvicorn.run(app, host="0.0.0.0", port=7860)