# Source: Hugging Face Space "shukdevdattaEX" — app.py (revision fd142b1, 11.4 kB export)
import os
import re
import json
import math
import gradio as gr
from typing import List, Dict, Any, Tuple
from together import Together
# -----------------------------
# Tolerant JSON loader (fixes your error)
# -----------------------------
def _remove_trailing_commas(s: str) -> str:
"""Remove trailing commas before ] or } when not inside strings."""
out = []
in_str = False
esc = False
for i, ch in enumerate(s):
if in_str:
out.append(ch)
if esc:
esc = False
elif ch == '\\':
esc = True
elif ch == '"':
in_str = False
continue
else:
if ch == '"':
in_str = True
out.append(ch)
continue
if ch == ',':
j = i + 1
while j < len(s) and s[j] in ' \t\r\n':
j += 1
if j < len(s) and s[j] in ']}':
# skip this comma
continue
out.append(ch)
return ''.join(out)
def _extract_json_objects(text: str) -> List[str]:
"""Extract top-level JSON objects by balancing curly braces, ignoring braces inside strings."""
objs = []
in_str = False
esc = False
brace_depth = 0
start = None
for i, ch in enumerate(text):
if in_str:
if esc:
esc = False
elif ch == '\\':
esc = True
elif ch == '"':
in_str = False
else:
if ch == '"':
in_str = True
elif ch == '{':
if brace_depth == 0:
start = i
brace_depth += 1
elif ch == '}':
if brace_depth > 0:
brace_depth -= 1
if brace_depth == 0 and start is not None:
objs.append(text[start:i+1])
start = None
return objs
def safe_load_phpmyadmin_like_json(raw_text: str) -> List[Dict[str, Any]]:
    """Parse a possibly malformed phpMyAdmin-style JSON export.

    Strategy, in order of preference:
    1. strict ``json.loads``;
    2. retry after stripping trailing commas;
    3. last resort — salvage individual top-level objects and parse them
       one by one, silently skipping chunks that still fail.

    Returns whatever the successful parse yields (usually a list of
    header/table objects).
    """
    try:
        return json.loads(raw_text)
    except json.JSONDecodeError:
        pass
    try:
        return json.loads(_remove_trailing_commas(raw_text))
    except json.JSONDecodeError:
        pass
    salvaged = []
    for chunk in _extract_json_objects(raw_text):
        try:
            salvaged.append(json.loads(_remove_trailing_commas(chunk)))
        except json.JSONDecodeError:
            # Unparseable chunk — drop it rather than crash the whole load.
            continue
    return salvaged
# -----------------------------
# Build a retriever-friendly corpus
# -----------------------------
def flatten_json_to_corpus(docs: List[Dict[str, Any]], max_value_len: int = 500) -> List[Dict[str, Any]]:
    """Flatten a parsed export into small searchable text passages.

    Each row of a ``{"type": "table", ...}`` object becomes one passage of the
    form ``[table=name idx=i] key=value ; key=value``, with values truncated
    to *max_value_len* characters. Non-table objects (headers, metadata) are
    kept as compact JSON "meta" passages.

    Returns a list of ``{"table": str, "idx": int, "text": str}`` dicts.
    """
    corpus: List[Dict[str, Any]] = []
    for obj in docs:
        # Robustness fix: json.loads can yield non-dict top-level items
        # (numbers, strings, lists); the original crashed on obj.get().
        # Keep them as meta passages instead of raising AttributeError.
        if not isinstance(obj, dict):
            corpus.append({
                "table": "meta",
                "idx": -1,
                "text": json.dumps(obj, ensure_ascii=False)[:2000],
            })
            continue
        otype = obj.get("type")
        if otype == "table":
            tname = obj.get("name", "unknown_table")
            rows = obj.get("data", [])
            if isinstance(rows, list):
                for i, row in enumerate(rows):
                    if isinstance(row, dict):
                        parts = []
                        for k, v in row.items():
                            val = str(v)
                            if len(val) > max_value_len:
                                val = val[:max_value_len] + "…"
                            parts.append(f"{k}={val}")
                        text = f"[table={tname} idx={i}] " + " ; ".join(parts)
                        corpus.append({"table": tname, "idx": i, "text": text})
        else:
            # Non-table entries (headers, etc.) — keep a small representation.
            text = json.dumps(obj, ensure_ascii=False)[:2000]
            corpus.append({"table": otype or "meta", "idx": -1, "text": text})
    return corpus
# -----------------------------
# Super-simple keyword retriever
# -----------------------------
def _tokenize(s: str) -> List[str]:
return re.findall(r"[A-Za-z0-9_]+", s.lower())
def score_doc(query: str, doc_text: str) -> float:
    """Lightweight relevance score for *doc_text* against *query*.

    Counts how many document tokens appear in the query's token set, then
    divides by ``log2(len + 2)`` as a mild BM25-style length penalty.
    Returns 0.0 for documents with no tokens.
    """
    doc_tokens = re.findall(r"[A-Za-z0-9_]+", doc_text.lower())
    if not doc_tokens:
        return 0.0
    query_terms = set(re.findall(r"[A-Za-z0-9_]+", query.lower()))
    hits = sum(1 for token in doc_tokens if token in query_terms)
    return hits / math.log2(len(doc_tokens) + 2)
def retrieve_top_k(query: str, corpus: List[Dict[str, Any]], k: int = 10, per_table_cap: int = 5) -> List[Dict[str, Any]]:
    """Return up to *k* passages most relevant to *query*.

    At most *per_table_cap* passages come from any single table, so one big
    table cannot flood the model's context. If no passage scores above zero,
    fall back to the first *k* ranked items so the caller always gets
    something to work with.
    """
    ranked = sorted(
        ((score_doc(query, passage["text"]), passage) for passage in corpus),
        key=lambda pair: pair[0],
        reverse=True,
    )
    picked: List[Dict[str, Any]] = []
    taken_per_table: Dict[str, int] = {}
    for score, passage in ranked:
        if score <= 0:
            continue
        table = passage.get("table", "unknown")
        if taken_per_table.get(table, 0) >= per_table_cap:
            continue  # this table already contributed its quota
        picked.append(passage)
        taken_per_table[table] = taken_per_table.get(table, 0) + 1
        if len(picked) >= k:
            break
    if not picked:
        # Nothing matched — return the top-ranked items regardless of score.
        picked = [passage for _, passage in ranked[:k]]
    return picked
# -----------------------------
# Compose prompt for Together model
# -----------------------------
def build_prompt(query: str, passages: List[Dict[str, Any]]) -> str:
    """Assemble the grounded-QA prompt: user question plus retrieved snippets.

    The instructions pin the model to the supplied context so it answers only
    from the JSON export and admits when the answer is absent.
    """
    context = "\n\n".join(passage["text"] for passage in passages)
    prompt = f"""You are a strict JSON-knowledge assistant. Answer ONLY using the provided context from the JSON export.
If the answer is not present, say you could not find it in the JSON.
# User question
{query}
# Context (JSON-derived snippets)
{context}
# Instructions
- Cite table names and ids if helpful (e.g., table=admission_acceptance_lists idx=12).
- Do not invent any data that is not in the context."""
    return prompt
# -----------------------------
# Together client helper
# -----------------------------
def call_together(api_key: str, prompt: str) -> str:
    """Send *prompt* to Together's Exaone 3.5 32B chat model.

    Returns the model's reply text, or a warning string (rather than
    raising) when no API key was supplied.
    """
    key = (api_key or "").strip()
    if not key:
        return "⚠️ Please enter your Together API key."
    # Export the key via the environment too, so the SDK picks it up everywhere.
    os.environ["TOGETHER_API_KEY"] = key
    client = Together(api_key=key)
    response = client.chat.completions.create(
        model="lgai/exaone-3-5-32b-instruct",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.2,
    )
    return response.choices[0].message.content
# -----------------------------
# Gradio App
# -----------------------------
with gr.Blocks(title="JSON Chatbot (Together)") as demo:
    gr.Markdown("## 📚 JSON Chatbot on Your Dump (Together Exaone 3.5 32B)\nUpload your JSON export and ask questions. The app safely loads imperfect JSON and retrieves the most relevant rows to answer your query.")

    with gr.Row():
        api_key_tb = gr.Textbox(label="Together API Key", type="password", placeholder="Paste your TOGETHER_API_KEY here")
        topk_slider = gr.Slider(3, 20, value=10, step=1, label="Top-K JSON Passages")
    with gr.Row():
        json_file = gr.File(label="Upload JSON export (e.g., phpMyAdmin export)", file_count="single", file_types=[".json"])
        fallback_path = gr.Textbox(label="Or fixed path on disk (optional)", placeholder="e.g., sultanbr_innovativeskills.json")
    with gr.Accordion("Advanced", open=False):
        per_table_cap = gr.Slider(1, 10, value=5, step=1, label="Max passages per table")
        max_val_len = gr.Slider(100, 2000, value=500, step=50, label="Max value length per field (truncation)")

    status = gr.Markdown("")
    chatbot = gr.Chatbot(height=420)
    user_box = gr.Textbox(label="Ask something about the JSON...", placeholder="e.g., What are the admission criteria?")
    clear_btn = gr.Button("Clear", variant="secondary")

    # Session state shared across events.
    state_corpus = gr.State([])  # list of {"table", "idx", "text"} passages
    state_docs = gr.State([])    # raw list of parsed JSON objects

    def load_json_to_corpus(file_obj, path_text, max_value_len):
        """Load JSON from the uploaded file (preferred) or a disk path.

        Builds the retrieval corpus and returns (status_text, corpus, docs);
        on any failure returns an error status with empty state.
        """
        try:
            if file_obj is not None:
                with open(file_obj.name, "r", encoding="utf-8", errors="replace") as f:
                    raw = f.read()
            else:
                p = (path_text or "").strip()
                if not p:
                    return ("⚠️ Please upload a JSON file or provide a valid path.", [], [])
                with open(p, "r", encoding="utf-8", errors="replace") as f:
                    raw = f.read()
            docs = safe_load_phpmyadmin_like_json(raw)
            if not isinstance(docs, list):
                # Some exports are a single object — normalize to a list.
                docs = [docs]
            corpus = flatten_json_to_corpus(docs, max_value_len=int(max_value_len))
            return (f"✅ Loaded {len(docs)} top-level objects; built {len(corpus)} passages.", corpus, docs)
        except Exception as e:
            return (f"❌ Load error: {e}", [], [])

    def ask(api_key, query, history, corpus, k, cap):
        """Retrieve passages, query the model, and append to chat history.

        Returns (history, textbox_value) so the input box is cleared after a
        handled question — fix: the original left the typed question sitting
        in the textbox because only the chatbot was wired as an output.
        """
        if not corpus:
            # Keep the typed question so the user can load JSON and retry.
            return history + [[query, "⚠️ Please upload/load the JSON first."]], query
        if not query or not query.strip():
            return history + [["", "⚠️ Please enter a question."]], ""
        top_passages = retrieve_top_k(query, corpus, k=int(k), per_table_cap=int(cap))
        prompt = build_prompt(query, top_passages)
        try:
            answer = call_together(api_key, prompt)
        except Exception as e:
            answer = f"❌ API error: {e}"
        return history + [[query, answer]], ""

    # Event wiring.
    json_file.upload(
        load_json_to_corpus,
        inputs=[json_file, fallback_path, max_val_len],
        outputs=[status, state_corpus, state_docs],
    )
    fallback_path.change(
        load_json_to_corpus,
        inputs=[json_file, fallback_path, max_val_len],
        outputs=[status, state_corpus, state_docs],
    )
    user_box.submit(
        ask,
        inputs=[api_key_tb, user_box, chatbot, state_corpus, topk_slider, per_table_cap],
        outputs=[chatbot, user_box],
    )
    clear_btn.click(
        lambda: ([], "", "🔄 Ready. Upload JSON or set a path, then ask a question."),
        inputs=[],
        outputs=[chatbot, user_box, status],
    )
if __name__ == "__main__":
    # Start the Gradio server when this file is run directly.
    demo.launch()