File size: 6,091 Bytes
138ef71
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a211f96
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
from fastapi import FastAPI, Request, Header
from fastapi.responses import JSONResponse
import threading
from log import log
from service_config import ServiceConfig
from session import SessionStore
from prompt_engine import PromptEngine
from llm_connector import LLMConnector
from api_connector import APIConnector
from validation_engine import ValidationEngine
import traceback
import uvicorn

# Initialize core components as module-level singletons.
# NOTE(review): service_config.load() runs at import time — any config error
# aborts startup; confirm that fail-fast behavior is intended.
service_config = ServiceConfig()
service_config.load()
session_store = SessionStore()          # in-memory session registry
prompt_engine = PromptEngine(service_config)
llm_connector = LLMConnector(service_config)
api_connector = APIConnector(service_config)
validation_engine = ValidationEngine()

app = FastAPI()






@app.get("/")
def health_check():
    """Liveness probe: report the service as up."""
    return {"status": "ok"}


@app.post("/start_chat")
def start_chat(request: Request):
    """Create a fresh chat session for a project.

    Reads the ``project_name`` query parameter and returns the new
    session id, or an error payload when the parameter is absent
    (errors are returned as 200 responses, per this file's convention).
    """
    project = request.query_params.get("project_name")
    if project:
        new_session = session_store.create_session(project)
        return {"session_id": new_session.session_id}
    return {"error": "Missing project_name parameter."}


@app.post("/chat")
async def chat(request: Request, x_session_id: str = Header(None)):
    """Main conversation endpoint: advances a per-session state machine.

    States progress intent_detection -> parameter_extraction -> validation
    -> api_call -> humanization. The chained ``if`` blocks below are
    deliberately NOT ``elif``: a single request can fall through several
    states in one pass once the earlier state succeeds.

    Expects JSON body ``{"user_input": ...}`` and an ``X-Session-ID``
    header. Handled errors are returned as 200 responses with an
    ``error`` key; unexpected exceptions become HTTP 500.

    NOTE(review): if the session is in none of the handled states, or the
    flow returns from no branch, the function falls off the end and FastAPI
    serialises ``null`` — confirm this is intended.
    NOTE(review): llm_connector.call_spark and api_connector.call_api look
    synchronous; calling them inside ``async def`` blocks the event loop —
    verify, and consider run_in_executor if so.
    """
    if not x_session_id:
        return {"error": "Missing X-Session-ID header."}

    session = session_store.get_session(x_session_id)
    if not session:
        return {"error": "Invalid or expired session."}

    try:
        body = await request.json()
        user_input = body.get("user_input", "").strip()
        if not user_input:
            return {"error": "Empty user input."}

        # Record the user turn before any LLM call so prompts see it.
        session.chat_history.append({"role": "user", "content": user_input})
        project_name = session.project_name

        if session.state == "intent_detection":
            # Ask the LLM which intent the user wants and which parameters
            # it could already extract from the conversation so far.
            prompt = prompt_engine.build_intent_prompt(project_name)
            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
            if llm_response is None:
                return {"error": "Failed to get intent detection result."}

            intent = llm_response.get("intent")
            params = llm_response.get("params", {})
            missing = llm_response.get("missing", [])

            session.last_intent = intent
            session.variables.update(params)
            session.awaiting_parameters = missing

            if missing:
                # Pause the flow: ask the user for the missing fields and
                # resume in parameter_extraction on the next request.
                session.state = "parameter_extraction"
                return {"response": f"Please provide: {', '.join(missing)}"}

            session.state = "validation"

        if session.state == "parameter_extraction":
            # Re-prompt the LLM to pull the still-missing parameters out of
            # the updated chat history.
            prompt = prompt_engine.build_parameter_prompt(project_name, session.last_intent, session.awaiting_parameters)
            llm_response = llm_connector.call_spark(project_name, prompt, session.chat_history)
            if llm_response is None:
                return {"error": "Failed to extract parameters."}

            params = llm_response.get("params", {})
            missing = llm_response.get("missing", [])

            session.variables.update(params)
            session.awaiting_parameters = missing

            if missing:
                # Still incomplete — stay in parameter_extraction and ask again.
                return {"response": f"Please provide: {', '.join(missing)}"}

            session.state = "validation"

        if session.state == "validation":
            # Look up the intent definition in the project config and run
            # type/constraint checks on the collected parameters.
            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
            if not intent_def:
                return {"error": f"Intent definition not found: {session.last_intent}"}

            is_valid, errors = validation_engine.validate_parameters(intent_def, session.variables)
            if not is_valid:
                # Surface validation messages to the user as a normal reply.
                return {"response": " ".join(errors)}

            session.state = "api_call"

        if session.state == "api_call":
            # NOTE(review): no None-guard on intent_def here, unlike the
            # validation branch. Within one request validation always ran
            # first, but if "api_call" ever persisted on a session this
            # would pass None into call_api — confirm.
            intent_def = next((i for i in service_config.get_project_intents(project_name) if i["name"] == session.last_intent), None)
            api_response = api_connector.call_api(intent_def, session)
            if "fallback" in api_response:
                # Backend provided a canned reply (e.g. API failure path).
                return {"response": api_response["fallback"]}

            session.state = "humanization"
            session.variables["api_result"] = api_response

        if session.state == "humanization":
            # Feed only the raw API result (as a system message) to the LLM
            # so it can phrase a natural-language answer.
            prompt = prompt_engine.build_humanization_prompt(project_name, session.last_intent)
            chat_history = [{"role": "system", "content": str(session.variables["api_result"])}]
            humanized_response = llm_connector.call_spark(project_name, prompt, chat_history)
            if humanized_response is None:
                return {"error": "Failed to humanize response."}

            session.chat_history.append({"role": "assistant", "content": humanized_response.get("answer")})
            session.state = "intent_detection"  # reset state
            session.last_intent = None
            session.variables = {}
            session.awaiting_parameters = []

            return {"response": humanized_response.get("answer")}

    except Exception as e:
        log(f"❌ Error in chat: {e}")
        traceback.print_exc()
        return JSONResponse(content={"error": str(e)}, status_code=500)


@app.post("/reload_config")
def reload_config():
    """Kick off an asynchronous reload of the service configuration.

    The reload runs in a daemon thread so this request returns
    immediately; success or failure is only visible in the logs.

    Returns an "accepted" status payload right away.
    """
    def background_reload():
        # Best-effort: a failed reload is logged, never raised (the HTTP
        # response was already sent).
        try:
            service_config.load()
            # Fixed mojibake: the success marker was "βœ…" (UTF-8 check mark
            # decoded as Latin-1); now matches the intact emoji (❌, 🚦, 🌐)
            # used elsewhere in this file.
            log("✅ Service config reloaded successfully.")
        except Exception as e:
            log(f"❌ Error reloading config: {e}")

    threading.Thread(target=background_reload, daemon=True).start()
    return {"status": "accepted", "message": "Config reload started in background."}


@app.post("/run_tests")
def run_tests():
    """Placeholder endpoint for the (not yet implemented) test runner."""
    log("🚦 /run_tests endpoint called. (Test runner needs to be implemented.)")
    return {
        "status": "not_implemented",
        "message": "Test runner is not yet implemented.",
    }


if __name__ == "__main__":
    # Direct-run entry point (development); binds on all interfaces.
    log("🌐 Starting Flare Intent Service...")
    uvicorn.run(app, host="0.0.0.0", port=7860)