HamidOmarov committed on
Commit
b1de6d2
·
verified ·
1 Parent(s): 7715973

Update app/api.py

Browse files
Files changed (1) hide show
  1. app/api.py +60 -89
app/api.py CHANGED
@@ -1,12 +1,12 @@
1
  # app/api.py
2
  from __future__ import annotations
3
 
4
- from typing import List, Optional
 
5
  from collections import deque
6
- from datetime import datetime
7
  from time import perf_counter
8
- import re
9
- import os
10
 
11
  import faiss
12
  from fastapi import FastAPI, UploadFile, File, HTTPException
@@ -16,11 +16,11 @@ from pydantic import BaseModel, Field
16
 
17
  from .rag_system import SimpleRAG, UPLOAD_DIR, INDEX_DIR
18
 
19
- # ------------------------------------------------------------------------------
20
- # App setup
21
- # ------------------------------------------------------------------------------
22
- app = FastAPI(title="RAG API", version="1.3.0")
23
 
 
24
  app.add_middleware(
25
  CORSMiddleware,
26
  allow_origins=["*"],
@@ -31,9 +31,7 @@ app.add_middleware(
31
 
32
  rag = SimpleRAG()
33
 
34
- # ------------------------------------------------------------------------------
35
- # Models
36
- # ------------------------------------------------------------------------------
37
  class UploadResponse(BaseModel):
38
  filename: str
39
  chunks_added: int
@@ -54,34 +52,28 @@ class HistoryResponse(BaseModel):
54
  total_chunks: int
55
  history: List[HistoryItem] = []
56
 
57
- # ------------------------------------------------------------------------------
58
- # Lightweight stats store (in-memory)
59
- # ------------------------------------------------------------------------------
60
  class StatsStore:
61
  def __init__(self):
62
  self.documents_indexed = 0
63
  self.questions_answered = 0
64
  self.latencies_ms = deque(maxlen=500)
65
- # Mon..Sun simple counter (index 0 = today for simplicity)
66
- self.last7_questions = deque([0] * 7, maxlen=7)
67
- self.history = deque(maxlen=50) # recent questions
68
 
69
  def add_docs(self, n: int):
70
  if n > 0:
71
- self.documents_indexed += n
72
 
73
  def add_question(self, latency_ms: Optional[int] = None, q: Optional[str] = None):
74
  self.questions_answered += 1
75
  if latency_ms is not None:
76
  self.latencies_ms.append(int(latency_ms))
77
- if len(self.last7_questions) < 7:
78
- self.last7_questions.appendleft(1)
79
- else:
80
- # attribute to "today" bucket
81
  self.last7_questions[0] += 1
82
  if q:
83
  self.history.appendleft(
84
- {"question": q, "timestamp": datetime.utcnow().isoformat()}
85
  )
86
 
87
  @property
@@ -90,96 +82,76 @@ class StatsStore:
90
 
91
  stats = StatsStore()
92
 
93
- # ------------------------------------------------------------------------------
94
- # Helpers
95
- # ------------------------------------------------------------------------------
96
- _GENERIC_PATTERNS = [
97
- r"\bbased on document context\b",
98
- r"\bappears to be\b",
99
- r"\bgeneral (?:summary|overview)\b",
100
- ]
101
-
102
  _STOPWORDS = {
103
  "the","a","an","of","for","and","or","in","on","to","from","with","by","is","are",
104
- "was","were","be","been","being","at","as","that","this","these","those","it",
105
- "its","into","than","then","so","such","about","over","per","via","vs","within"
106
  }
107
 
108
- def is_generic_answer(text: str) -> bool:
 
 
 
109
  if not text:
110
  return True
111
  low = text.strip().lower()
112
  if len(low) < 15:
113
  return True
114
- for pat in _GENERIC_PATTERNS:
115
- if re.search(pat, low):
116
- return True
117
  return False
118
 
119
- def tokenize(s: str) -> List[str]:
120
- return [w for w in re.findall(r"[a-zA-Z0-9]+", s.lower()) if w and w not in _STOPWORDS and len(w) > 2]
121
-
122
- def extractive_answer(question: str, contexts: List[str], max_chars: int = 500) -> str:
123
- """
124
- Simple keyword-based extractive fallback:
125
- pick sentences containing most question tokens.
126
- """
127
  if not contexts:
128
  return "I couldn't find relevant information in the indexed documents for this question."
 
 
 
129
 
130
- q_tokens = set(tokenize(question))
131
- if not q_tokens:
132
- # if question is e.g. numbers only
133
- q_tokens = set(tokenize(" ".join(contexts[:1])))
134
-
135
- # split into sentences
136
  sentences: List[str] = []
137
  for c in contexts:
138
- c = c or ""
139
- # rough sentence split
140
- for s in re.split(r"(?<=[\.!\?])\s+|\n+", c.strip()):
141
  s = s.strip()
142
  if s:
143
  sentences.append(s)
144
 
145
- if not sentences:
146
- # fallback to first context chunk
147
- return (contexts[0] or "")[:max_chars]
148
-
149
- # score sentences
150
  scored: List[tuple[int, str]] = []
151
  for s in sentences:
152
- toks = set(tokenize(s))
153
- score = len(q_tokens & toks)
154
- scored.append((score, s))
155
-
156
- # pick top sentences with score > 0, otherwise first few sentences
157
  scored.sort(key=lambda x: (x[0], len(x[1])), reverse=True)
158
- picked: List[str] = []
159
 
160
- for score, sent in scored:
161
- if score <= 0 and picked:
 
162
  break
163
- if len(" ".join(picked) + " " + sent) > max_chars:
164
  break
165
- picked.append(sent)
166
 
167
  if not picked:
168
- # no overlap, take first ~max_chars from contexts
169
  return (contexts[0] or "")[:max_chars]
 
 
170
 
171
- return " ".join(picked).strip()
172
-
173
- # ------------------------------------------------------------------------------
174
- # Routes
175
- # ------------------------------------------------------------------------------
176
  @app.get("/")
177
  def root():
178
  return RedirectResponse(url="/docs")
179
 
180
  @app.get("/health")
181
  def health():
182
- return {"status": "ok", "version": app.version, "summarizer": "extractive_en + translate + fallback"}
 
 
 
 
 
 
183
 
184
  @app.get("/debug/translate")
185
  def debug_translate():
@@ -220,34 +192,35 @@ def ask_question(payload: AskRequest):
220
  k = max(1, int(payload.top_k))
221
  t0 = perf_counter()
222
 
223
- # retrieval
224
  try:
225
- hits = rag.search(q, k=k) # expected: List[Tuple[str, float]]
226
  except Exception as e:
227
  raise HTTPException(status_code=500, detail=f"Search failed: {e}")
228
 
229
- contexts = [c for c, _ in (hits or []) if c] or (rag.last_added[:k] if getattr(rag, "last_added", None) else [])
230
 
231
  if not contexts:
232
- stats.add_question(int((perf_counter() - t0) * 1000), q=q)
 
233
  return AskResponse(
234
  answer="I couldn't find relevant information in the indexed documents for this question.",
235
  contexts=[]
236
  )
237
 
238
- # synthesis (LLM or rule-based inside rag)
239
  try:
240
- synthesized = rag.synthesize_answer(q, contexts) or ""
241
  except Exception:
242
  synthesized = ""
243
 
244
- # guard against generic/unchanging answers
245
- if is_generic_answer(synthesized):
246
- synthesized = extractive_answer(q, contexts, max_chars=600)
247
 
248
  latency_ms = int((perf_counter() - t0) * 1000)
249
  stats.add_question(latency_ms, q=q)
250
- return AskResponse(answer=synthesized.strip(), contexts=contexts)
251
 
252
  @app.get("/get_history", response_model=HistoryResponse)
253
  def get_history():
@@ -258,7 +231,6 @@ def get_history():
258
 
259
  @app.get("/stats")
260
  def stats_endpoint():
261
- # keep backward compat fields + add dashboard-friendly metrics
262
  return {
263
  "documents_indexed": stats.documents_indexed,
264
  "questions_answered": stats.questions_answered,
@@ -282,7 +254,6 @@ def reset_index():
282
  os.remove(p)
283
  except FileNotFoundError:
284
  pass
285
- # also reset stats counters to avoid stale analytics
286
  stats.documents_indexed = 0
287
  stats.questions_answered = 0
288
  stats.latencies_ms.clear()
 
1
  # app/api.py
2
  from __future__ import annotations
3
 
4
+ import os
5
+ import re
6
  from collections import deque
7
+ from datetime import datetime, timezone
8
  from time import perf_counter
9
+ from typing import List, Optional, Dict, Any
 
10
 
11
  import faiss
12
  from fastapi import FastAPI, UploadFile, File, HTTPException
 
16
 
17
  from .rag_system import SimpleRAG, UPLOAD_DIR, INDEX_DIR
18
 
19
+ __version__ = "1.3.1"
20
+
21
+ app = FastAPI(title="RAG API", version=__version__)
 
22
 
23
+ # CORS (for the Streamlit UI)
24
  app.add_middleware(
25
  CORSMiddleware,
26
  allow_origins=["*"],
 
31
 
32
  rag = SimpleRAG()
33
 
34
+ # -------------------- Schemas --------------------
 
 
35
  class UploadResponse(BaseModel):
36
  filename: str
37
  chunks_added: int
 
52
  total_chunks: int
53
  history: List[HistoryItem] = []
54
 
55
+ # -------------------- Stats (in-memory) --------------------
 
 
56
  class StatsStore:
57
  def __init__(self):
58
  self.documents_indexed = 0
59
  self.questions_answered = 0
60
  self.latencies_ms = deque(maxlen=500)
61
+ self.last7_questions = deque([0] * 7, maxlen=7) # sadə günlük sayğac
62
+ self.history = deque(maxlen=50)
 
63
 
64
  def add_docs(self, n: int):
65
  if n > 0:
66
+ self.documents_indexed += int(n)
67
 
68
  def add_question(self, latency_ms: Optional[int] = None, q: Optional[str] = None):
69
  self.questions_answered += 1
70
  if latency_ms is not None:
71
  self.latencies_ms.append(int(latency_ms))
72
+ if len(self.last7_questions) == 7:
 
 
 
73
  self.last7_questions[0] += 1
74
  if q:
75
  self.history.appendleft(
76
+ {"question": q, "timestamp": datetime.now(timezone.utc).isoformat(timespec="seconds")}
77
  )
78
 
79
  @property
 
82
 
83
  stats = StatsStore()
84
 
85
+ # -------------------- Helpers --------------------
 
 
 
 
 
 
 
 
86
  _STOPWORDS = {
87
  "the","a","an","of","for","and","or","in","on","to","from","with","by","is","are",
88
+ "was","were","be","been","being","at","as","that","this","these","those","it","its",
89
+ "into","than","then","so","such","about","over","per","via","vs","within"
90
  }
91
 
92
+ def _tokenize(s: str) -> List[str]:
93
+ return [w for w in re.findall(r"[a-zA-Z0-9]+", s.lower()) if w and w not in _STOPWORDS and len(w) > 2]
94
+
95
+ def _is_generic_answer(text: str) -> bool:
96
  if not text:
97
  return True
98
  low = text.strip().lower()
99
  if len(low) < 15:
100
  return True
101
+ # tipik generik pattern-lər
102
+ if "based on document context" in low or "appears to be" in low:
103
+ return True
104
  return False
105
 
106
+ def _extractive_fallback(question: str, contexts: List[str], max_chars: int = 600) -> str:
107
+ """ Sualın açar sözlərinə əsasən kontekstdən cümlələr seç. """
 
 
 
 
 
 
108
  if not contexts:
109
  return "I couldn't find relevant information in the indexed documents for this question."
110
+ qtok = set(_tokenize(question))
111
+ if not qtok:
112
+ return (contexts[0] or "")[:max_chars]
113
 
114
+ # cümlələrə böl və skorla
 
 
 
 
 
115
  sentences: List[str] = []
116
  for c in contexts:
117
+ for s in re.split(r"(?<=[\.!\?])\s+|\n+", (c or "").strip()):
 
 
118
  s = s.strip()
119
  if s:
120
  sentences.append(s)
121
 
 
 
 
 
 
122
  scored: List[tuple[int, str]] = []
123
  for s in sentences:
124
+ st = set(_tokenize(s))
125
+ scored.append((len(qtok & st), s))
 
 
 
126
  scored.sort(key=lambda x: (x[0], len(x[1])), reverse=True)
 
127
 
128
+ picked: List[str] = []
129
+ for sc, s in scored:
130
+ if sc <= 0 and picked:
131
  break
132
+ if len((" ".join(picked) + " " + s).strip()) > max_chars:
133
  break
134
+ picked.append(s)
135
 
136
  if not picked:
 
137
  return (contexts[0] or "")[:max_chars]
138
+ bullets = "\n".join(f"- {p}" for p in picked)
139
+ return f"Answer (based on document context):\n{bullets}"
140
 
141
+ # -------------------- Routes --------------------
 
 
 
 
142
  @app.get("/")
143
  def root():
144
  return RedirectResponse(url="/docs")
145
 
146
  @app.get("/health")
147
  def health():
148
+ return {
149
+ "status": "ok",
150
+ "version": app.version,
151
+ "summarizer": "extractive_en + translate + keyword_fallback",
152
+ "faiss_ntotal": int(getattr(rag.index, "ntotal", 0)),
153
+ "model_dim": int(getattr(rag.index, "d", rag.embed_dim)),
154
+ }
155
 
156
  @app.get("/debug/translate")
157
  def debug_translate():
 
192
  k = max(1, int(payload.top_k))
193
  t0 = perf_counter()
194
 
195
+ # 1) Always search with the question embedding
196
  try:
197
+ hits = rag.search(q, k=k) # List[Tuple[text, score]]
198
  except Exception as e:
199
  raise HTTPException(status_code=500, detail=f"Search failed: {e}")
200
 
201
+ contexts = [c for c, _ in (hits or []) if c] or (getattr(rag, "last_added", [])[:k] if getattr(rag, "last_added", None) else [])
202
 
203
  if not contexts:
204
+ latency_ms = int((perf_counter() - t0) * 1000)
205
+ stats.add_question(latency_ms, q=q)
206
  return AskResponse(
207
  answer="I couldn't find relevant information in the indexed documents for this question.",
208
  contexts=[]
209
  )
210
 
211
+ # 2) Synthesize the answer (rag may be LLM- or rule-based internally)
212
  try:
213
+ synthesized = (rag.synthesize_answer(q, contexts) or "").strip()
214
  except Exception:
215
  synthesized = ""
216
 
217
+ # 3) If it looks generic, use the extractive fallback
218
+ if _is_generic_answer(synthesized):
219
+ synthesized = _extractive_fallback(q, contexts, max_chars=600)
220
 
221
  latency_ms = int((perf_counter() - t0) * 1000)
222
  stats.add_question(latency_ms, q=q)
223
+ return AskResponse(answer=synthesized, contexts=contexts)
224
 
225
  @app.get("/get_history", response_model=HistoryResponse)
226
  def get_history():
 
231
 
232
  @app.get("/stats")
233
  def stats_endpoint():
 
234
  return {
235
  "documents_indexed": stats.documents_indexed,
236
  "questions_answered": stats.questions_answered,
 
254
  os.remove(p)
255
  except FileNotFoundError:
256
  pass
 
257
  stats.documents_indexed = 0
258
  stats.questions_answered = 0
259
  stats.latencies_ms.clear()