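"""NeuroPulse AI backend: a FastAPI service for review analytics, combining
summarization, sentiment and emotion detection, churn-risk scoring,
pain-point extraction, follow-up Q&A, and an in-memory churn log."""
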
from fastapi import FastAPI, Request, Header, HTTPException, Query
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from datetime import datetime
import os
import uuid
from functools import lru_cache
from transformers import pipeline
import logging, traceback
from typing import Optional, List, Union

from model import (
    summarize_review, smart_summarize, detect_industry,
    detect_product_category, detect_emotion, answer_followup, answer_only,
    assess_churn_risk, extract_pain_points  # ✅ Added extract_pain_points
)

app = FastAPI(
    title="🧠 NeuroPulse AI",
    description="Multilingual GenAI for smarter feedback: summarization, sentiment, emotion, aspects, Q&A and tags.",
    version="2025.1.0",
    openapi_url="/openapi.json",
    docs_url=None,
    redoc_url="/redoc"
)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
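# Note: allowing every origin together with allow_credentials=True is very
# permissive; restrict allow_origins before deploying to production.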

logging.basicConfig(level=logging.INFO)
# Prefer an environment variable for the key (the variable name here is
# illustrative); the hardcoded fallback is for local development only.
VALID_API_KEY = os.getenv("NEUROPULSE_API_KEY", "my-secret-key")
log_store = []  # ✅ Shared in-memory churn log (process-local; cleared on restart)

@app.get("/", response_class=HTMLResponse)
def root():
    return "<h1>NeuroPulse AI Backend is Running</h1>"

@app.get("/docs", include_in_schema=False)
def custom_swagger_ui():
    return get_swagger_ui_html(
        openapi_url=app.openapi_url,
        title="🧠 Swagger UI - NeuroPulse AI",
        swagger_favicon_url="https://cdn-icons-png.flaticon.com/512/3794/3794616.png",
        # swagger-ui-dist assets from jsDelivr; pin a version if reproducibility matters.
        swagger_js_url="https://cdn.jsdelivr.net/npm/swagger-ui-dist/swagger-ui-bundle.js",
        swagger_css_url="https://cdn.jsdelivr.net/npm/swagger-ui-dist/swagger-ui.css",
    )

@app.exception_handler(Exception)
async def exception_handler(request: Request, exc: Exception):
    logging.error(f"Unhandled Exception: {traceback.format_exc()}")
    return JSONResponse(status_code=500, content={"detail": "Internal Server Error. Please contact support."})

# ==== SCHEMAS ====

class ReviewInput(BaseModel):
    text: str
    model: str = "distilbert-base-uncased-finetuned-sst-2-english"
    industry: Optional[str] = None
    aspects: bool = False
    follow_up: Optional[Union[str, List[str]]] = None
    product_category: Optional[str] = None
    device: Optional[str] = None
    intelligence: Optional[bool] = False
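    # "brief" yields a short abstractive summary; any other value yields the detailed smart summary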
    verbosity: Optional[str] = "detailed"

class BulkReviewInput(BaseModel):
    reviews: List[str]
    model: str = "distilbert-base-uncased-finetuned-sst-2-english"
    industry: Optional[List[str]] = None
    aspects: bool = False
    product_category: Optional[List[str]] = None
    device: Optional[List[str]] = None
    follow_up: Optional[List[Union[str, List[str]]]] = None
    intelligence: Optional[bool] = False

class FollowUpRequest(BaseModel):
    text: str
    question: str
    verbosity: Optional[str] = "brief"

# ==== HELPERS ====

def auto_fill(value: Optional[str], fallback: str) -> str:
    """Return value unless it is empty or 'auto-detect', in which case use the fallback."""
    if not value or value.lower() == "auto-detect":
        return fallback
    return value
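
# Constructing a transformers pipeline reloads model weights, which is too
# slow to repeat per request; cache one pipeline per model name so repeated
# requests reuse the loaded model.
@lru_cache(maxsize=4)
def get_sentiment_pipeline(model_name: str):
    return pipeline("sentiment-analysis", model=model_name)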

# ==== ENDPOINTS ====
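
# Example /analyze/ request (illustrative; assumes the default uvicorn port,
# and the review text must be at least 20 words):
#   curl -X POST http://localhost:8000/analyze/ \
#     -H "x-api-key: my-secret-key" -H "Content-Type: application/json" \
#     -d '{"text": "<20+ word review>", "aspects": true, "verbosity": "brief"}'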

@app.post("/analyze/")
async def analyze(data: ReviewInput, x_api_key: str = Header(None)):
    if x_api_key != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="❌ Invalid or missing API key")

    if len(data.text.split()) < 20:
        raise HTTPException(status_code=400, detail="⚠️ Review too short for analysis (min. 20 words).")

    global log_store

    try:
        # === Generate Summary ===
        summary = (
            summarize_review(data.text, max_len=40, min_len=8)
            if (data.verbosity or "detailed").lower() == "brief"
            else smart_summarize(data.text, n_clusters=2 if data.intelligence else 1)
        )

        # === Sentiment + Emotion ===
        sentiment_pipeline = get_sentiment_pipeline(data.model)  # cached per model name
        sentiment = sentiment_pipeline(data.text)[0]

        emotion = detect_emotion(data.text)

        churn_risk = assess_churn_risk(sentiment["label"], emotion)

        # === Auto-detect metadata ===
        industry = (
            detect_industry(data.text)
            if not data.industry or "auto" in data.industry.lower()
            else data.industry
        )
        product_category = (
            detect_product_category(data.text)
            if not data.product_category or "auto" in data.product_category.lower()
            else data.product_category
        )

        # === Optional: Pain Points ===
        pain_points = extract_pain_points(data.text) if data.aspects else []

        # === Log entry ===
        log_store.append({
            "timestamp": datetime.now(),
            "product": product_category,
            "churn_risk": churn_risk,
            "user_id": str(uuid.uuid4())
        })
        if len(log_store) > 1000:
            log_store = log_store[-1000:]

        # === Final API Response ===
        response = {
            "summary": summary,
            "sentiment": sentiment,
            "emotion": emotion,
            "product_category": product_category,
            "device": "Web",
            "industry": industry,
            "churn_risk": churn_risk,
            "pain_points": pain_points
        }

        if data.follow_up:
            response["follow_up"] = answer_followup(data.text, data.follow_up, verbosity=data.verbosity)

        return response

    except Exception:
        logging.error(f"🔥 Unexpected analysis failure: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Internal Server Error during analysis.")

@app.post("/followup/")
async def followup(request: FollowUpRequest, x_api_key: str = Header(None)):
    if x_api_key != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="Invalid API key")
    # Validate before the try block so this 400 is not caught below and
    # converted into a 500.
    if not request.question or len(request.text.split()) < 10:
        raise HTTPException(status_code=400, detail="Question or text is too short.")
    try:
        return {"answer": answer_only(request.text, request.question)}
    except Exception:
        logging.error(f"❌ Follow-up failed: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Follow-up generation failed.")

@app.get("/log/")
async def get_churn_log(x_api_key: str = Header(None)):
    if x_api_key != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="Unauthorized")
    return {"log": log_store}

@app.post("/bulk/")
async def bulk_analyze(data: BulkReviewInput, token: str = Query(None)):
    if token != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="❌ Unauthorized: Invalid API token")

    global log_store

    try:
        results = []
        sentiment_pipeline = get_sentiment_pipeline(data.model)  # cached per model name

        for i, review_text in enumerate(data.reviews):
            if len(review_text.split()) < 20:
                results.append({
                    "review": review_text,
                    "error": "Too short to analyze"
                })
                continue

            summary = smart_summarize(review_text, n_clusters=2 if data.intelligence else 1)
            sentiment = sentiment_pipeline(review_text)[0]

            emotion = detect_emotion(review_text)

            churn = assess_churn_risk(sentiment["label"], emotion)
            pain = extract_pain_points(review_text) if data.aspects else []

            # The optional metadata lists may be shorter than reviews; guard the index.
            ind = auto_fill(data.industry[i] if data.industry and i < len(data.industry) else None, detect_industry(review_text))
            prod = auto_fill(data.product_category[i] if data.product_category and i < len(data.product_category) else None, detect_product_category(review_text))
            dev = auto_fill(data.device[i] if data.device and i < len(data.device) else None, "Web")

            result = {
                "review": review_text,
                "summary": summary,
                "sentiment": sentiment["label"],
                "score": sentiment["score"],
                "emotion": emotion,
                "industry": ind,
                "product_category": prod,
                "device": dev,
                "churn_risk": churn,
                "pain_points": pain
            }

            # ✅ Optional follow-up
            if data.follow_up and i < len(data.follow_up):
                follow_q = data.follow_up[i]
                result["follow_up"] = answer_followup(review_text, follow_q)

            # ✅ Log churn entry
            log_store.append({
                "timestamp": datetime.now(),
                "product": prod,
                "churn_risk": churn,
                "user_id": str(uuid.uuid4())
            })

            results.append(result)

        # ✅ Cap log size
        if len(log_store) > 1000:
            log_store = log_store[-1000:]

        return {"results": results}

    except Exception:
        logging.error(f"🔥 Bulk processing failed: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Failed to analyze bulk reviews")
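
if __name__ == "__main__":
    # Convenience launcher for local development; assumes uvicorn is installed
    # (the usual ASGI server for FastAPI). In production, run the app via an
    # ASGI server directly instead.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)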