File size: 6,192 Bytes
ac5d8ef
 
 
 
 
 
7cc1bf1
ac5d8ef
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ad7d928
 
ac5d8ef
 
 
 
 
 
 
 
 
 
 
b4b574c
 
ac5d8ef
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ad7d928
 
 
7cc1bf1
 
 
 
 
 
ad7d928
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
from fastapi import FastAPI, Request, Header, HTTPException
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.openapi.utils import get_openapi
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from fastapi import Query
from transformers import pipeline
import os, logging, traceback
from model import summarize_review, smart_summarize, detect_industry, detect_product_category, answer_followup
from typing import Optional, List

# FastAPI application instance.
# BUG FIX: the description contained mojibake ("β€”" — a UTF-8 em dash
# mis-decoded as latin-1); restored the intended "—" character.
app = FastAPI(
    title="\U0001f9e0 NeuroPulse AI",
    description="Multilingual GenAI for smarter feedback — summarization, sentiment, emotion, aspects, Q&A and tags.",
    version="2025.1.0",
    openapi_url="/openapi.json",
    docs_url=None,  # default Swagger UI disabled; a custom /docs route is defined below
    redoc_url="/redoc"
)

# CORS: wide-open configuration (all origins, methods, headers).
# NOTE(review): per the CORS spec, browsers reject `Access-Control-Allow-Origin: *`
# when credentials are allowed — allow_credentials=True with allow_origins=["*"]
# will not work for credentialed requests; confirm intent and pin origins in production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.exception_handler(Exception)
async def exception_handler(request: Request, exc: Exception):
    """Catch-all handler: log the full traceback, return a generic 500 body."""
    tb = traceback.format_exc()
    logging.error(f"Unhandled Exception: {tb}")
    body = {"detail": "Internal Server Error. Please contact support."}
    return JSONResponse(status_code=500, content=body)

@app.get("/docs", include_in_schema=False)
def custom_swagger_ui():
    """Serve a branded Swagger UI page in place of the default /docs."""
    cdn = "https://cdn.jsdelivr.net/npm/[email protected]"
    return get_swagger_ui_html(
        openapi_url=app.openapi_url,
        title="\U0001f9e0 Swagger UI - NeuroPulse AI",
        swagger_favicon_url="https://cdn-icons-png.flaticon.com/512/3794/3794616.png",
        swagger_js_url=f"{cdn}/swagger-ui-bundle.js",
        swagger_css_url=f"{cdn}/swagger-ui.css",
    )

@app.get("/", response_class=HTMLResponse)
def root():
    """Minimal landing page confirming the service is up."""
    banner = "<h1>NeuroPulse AI Backend is Running</h1>"
    return banner

class ReviewInput(BaseModel):
    """Request body for POST /analyze/: one review plus analysis options."""
    text: str  # raw review text; the endpoint rejects fewer than 20 words
    model: str = "distilbert-base-uncased-finetuned-sst-2-english"  # NOTE(review): not read by the /analyze/ handler — confirm whether it should select the pipeline
    industry: Optional[str] = None  # None or "auto*" triggers detect_industry()
    aspects: bool = False  # NOTE(review): not read by the /analyze/ handler
    follow_up: Optional[str] = None  # optional question answered via answer_followup()
    product_category: Optional[str] = None  # None or "auto*" triggers detect_product_category()
    device: Optional[str] = None  # NOTE(review): handler always returns "Web", ignoring this field
    intelligence: Optional[bool] = False  # True -> smart_summarize with 2 clusters
    verbosity: Optional[str] = "detailed"  # "brief" -> short summarize_review; else smart_summarize
    explain: Optional[bool] = False  # NOTE(review): not read by the /analyze/ handler

class BulkReviewInput(BaseModel):
    """Request body for POST /bulk/: a batch of reviews with optional
    per-review metadata lists aligned positionally with `reviews`."""
    reviews: List[str]  # one entry per review to analyze
    model: str = "distilbert-base-uncased-finetuned-sst-2-english"  # NOTE(review): not read by the /bulk/ handler
    industry: Optional[List[str]] = None  # indexed by review position; handler assumes same length as reviews
    aspects: bool = False  # NOTE(review): not read by the /bulk/ handler
    product_category: Optional[List[str]] = None  # indexed by review position
    device: Optional[List[str]] = None  # indexed by review position
    intelligence: Optional[bool] = False  # True -> smart_summarize with 2 clusters
# NOTE(review): hard-coded API key checked by both endpoints — move to an
# environment variable / secret store before deployment.
VALID_API_KEY = "my-secret-key"
logging.basicConfig(level=logging.INFO)
# Default Hugging Face sentiment pipeline, constructed once at module import.
sentiment_pipeline = pipeline("sentiment-analysis")

def auto_fill(value: Optional[str], fallback: str) -> str:
    """Return *value*, unless it is empty/None or the literal "auto-detect"
    (case-insensitive) — in those cases return *fallback* instead."""
    needs_fallback = (not value) or value.lower() == "auto-detect"
    return fallback if needs_fallback else value

@app.post("/analyze/")
async def analyze(data: ReviewInput, x_api_key: str = Header(None)):
    """Analyze a single review: summary, sentiment, placeholder emotion,
    auto-detected industry/product category, and an optional follow-up answer.

    Raises:
        HTTPException 401: a wrong API key was supplied.
        HTTPException 400: review shorter than 20 words.
        HTTPException 500: any unexpected failure during analysis.
    """
    # BUG FIX: the raise below was not indented under its `if`, which is an
    # IndentationError and prevented this module from importing at all.
    # NOTE(review): a *missing* header currently bypasses the check entirely
    # (only a wrong key is rejected) — confirm whether auth is meant to be optional.
    if x_api_key and x_api_key != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="❌ Invalid API key")
    if len(data.text.split()) < 20:
        raise HTTPException(status_code=400, detail="⚠️ Review too short for analysis (min. 20 words).")

    try:
        # "brief" verbosity -> short abstractive summary; anything else uses the
        # cluster-based smart summary (2 clusters when intelligence mode is on).
        # Guard against an explicit None: the field is Optional[str].
        verbosity = data.verbosity or "detailed"
        if verbosity.lower() == "brief":
            summary = summarize_review(data.text, max_len=40, min_len=8)
        else:
            summary = smart_summarize(data.text, n_clusters=2 if data.intelligence else 1)

        sentiment = sentiment_pipeline(data.text)[0]
        emotion = "joy"  # placeholder: emotion detection is not implemented yet

        # Auto-detect industry/category unless the caller supplied a concrete value.
        industry = detect_industry(data.text) if not data.industry or "auto" in data.industry.lower() else data.industry
        product_category = detect_product_category(data.text) if not data.product_category or "auto" in data.product_category.lower() else data.product_category
        device = "Web"  # NOTE(review): the request's device field is ignored here — confirm intent

        follow_up_response = None
        if data.follow_up:
            follow_up_response = answer_followup(data.text, data.follow_up, verbosity=data.verbosity)

        return {
            "summary": summary,
            "sentiment": sentiment,
            "emotion": emotion,
            "product_category": product_category,
            "device": device,
            "industry": industry,
            "follow_up": follow_up_response
        }

    except Exception:
        # Mask internal details from the client; full traceback goes to the log.
        logging.error(f"🔥 Unexpected analysis failure: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Internal Server Error during analysis. Please contact support.")

@app.post("/bulk/")
async def bulk_analyze(
    data: BulkReviewInput,
    token: str = Query(None)
):
    """Analyze a batch of reviews.

    Per-review metadata lists (industry, product_category, device) are aligned
    positionally with `reviews`; missing or short lists fall back to auto-detection.

    Raises:
        HTTPException 401: missing or wrong API token.
        HTTPException 500: any unexpected failure during processing.
    """
    if token != VALID_API_KEY:
        raise HTTPException(status_code=401, detail="❌ Unauthorized: Invalid API token")

    def _at(values: Optional[List[str]], i: int) -> Optional[str]:
        # Safe positional lookup. BUG FIX: the original indexed the list whenever
        # it was non-empty, raising IndexError (masked as a blanket 500) whenever
        # a metadata list was shorter than `reviews`.
        return values[i] if values and i < len(values) else None

    try:
        results = []
        for i, review_text in enumerate(data.reviews):
            if len(review_text.split()) < 20:
                results.append({
                    "review": review_text,
                    "error": "Too short to analyze"
                })
                continue

            summary = smart_summarize(review_text, n_clusters=2 if data.intelligence else 1)
            sentiment = sentiment_pipeline(review_text)[0]
            emotion = "joy"  # placeholder: emotion detection is not implemented yet

            ind = auto_fill(_at(data.industry, i), detect_industry(review_text))
            prod = auto_fill(_at(data.product_category, i), detect_product_category(review_text))
            dev = auto_fill(_at(data.device, i), "Web")

            results.append({
                "review": review_text,
                "summary": summary,
                "sentiment": sentiment["label"],
                "score": sentiment["score"],
                "emotion": emotion,
                "industry": ind,
                "product_category": prod,
                "device": dev
            })
        return {"results": results}

    except Exception:
        # Mask internal details from the client; full traceback goes to the log.
        logging.error(f"🔥 Bulk processing failed: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail="Failed to analyze bulk reviews")