File size: 11,391 Bytes
fd142b1
 
31be862
fd142b1
31be862
fd142b1
31be862
 
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
 
 
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
 
fd142b1
 
 
31be862
fd142b1
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
31be862
fd142b1
 
 
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31be862
fd142b1
 
 
 
31be862
 
fd142b1
 
 
31be862
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
import os
import re
import json
import math
import gradio as gr
from typing import List, Dict, Any, Tuple
from together import Together

# -----------------------------
# Tolerant JSON loader (fixes your error)
# -----------------------------
def _remove_trailing_commas(s: str) -> str:
    """Remove trailing commas before ] or } when not inside strings."""
    out = []
    in_str = False
    esc = False
    for i, ch in enumerate(s):
        if in_str:
            out.append(ch)
            if esc:
                esc = False
            elif ch == '\\':
                esc = True
            elif ch == '"':
                in_str = False
            continue
        else:
            if ch == '"':
                in_str = True
                out.append(ch)
                continue
            if ch == ',':
                j = i + 1
                while j < len(s) and s[j] in ' \t\r\n':
                    j += 1
                if j < len(s) and s[j] in ']}':
                    # skip this comma
                    continue
            out.append(ch)
    return ''.join(out)

def _extract_json_objects(text: str) -> List[str]:
    """Extract top-level JSON objects by balancing curly braces, ignoring braces inside strings."""
    objs = []
    in_str = False
    esc = False
    brace_depth = 0
    start = None
    for i, ch in enumerate(text):
        if in_str:
            if esc:
                esc = False
            elif ch == '\\':
                esc = True
            elif ch == '"':
                in_str = False
        else:
            if ch == '"':
                in_str = True
            elif ch == '{':
                if brace_depth == 0:
                    start = i
                brace_depth += 1
            elif ch == '}':
                if brace_depth > 0:
                    brace_depth -= 1
                    if brace_depth == 0 and start is not None:
                        objs.append(text[start:i+1])
                        start = None
    return objs

def safe_load_phpmyadmin_like_json(raw_text: str) -> List[Dict[str, Any]]:
    """
    Parse a possibly-malformed JSON export.

    Strategy: strict json.loads first; then a pass with trailing commas
    removed; finally object-by-object extraction, silently skipping any
    chunk that still fails to parse. Returns the parsed structure (a
    list of header/table objects for phpMyAdmin-style exports).
    """
    try:
        return json.loads(raw_text)
    except json.JSONDecodeError:
        pass
    # Second attempt: remove trailing commas across the whole document.
    try:
        return json.loads(_remove_trailing_commas(raw_text))
    except json.JSONDecodeError:
        pass
    # Last resort: salvage whatever individual objects still parse.
    parsed = []
    for chunk in _extract_json_objects(raw_text):
        try:
            parsed.append(json.loads(_remove_trailing_commas(chunk)))
        except json.JSONDecodeError:
            # Skip unparseable chunks rather than crashing the load.
            continue
    return parsed

# -----------------------------
# Build a retriever-friendly corpus
# -----------------------------
def flatten_json_to_corpus(docs: List[Dict[str, Any]], max_value_len: int = 500) -> List[Dict[str, Any]]:
    """
    Convert exported objects into small searchable text chunks.

    Each dict row of a "table" object becomes one passage of the form
    "[table=name idx=i] key=value ; ...", with long values truncated to
    *max_value_len* characters. Non-table objects (headers etc.) are kept
    as a truncated JSON dump under a pseudo-table named after their type.
    """
    chunks = []
    for entry in docs:
        entry_type = entry.get("type")
        if entry_type != "table":
            # Header/meta entries: keep a compact JSON representation.
            chunks.append({
                "table": entry_type or "meta",
                "idx": -1,
                "text": json.dumps(entry, ensure_ascii=False)[:2000],
            })
            continue
        table_name = entry.get("name", "unknown_table")
        rows = entry.get("data", [])
        if not isinstance(rows, list):
            continue
        for row_idx, row in enumerate(rows):
            if not isinstance(row, dict):
                continue
            fields = []
            for key, value in row.items():
                rendered = str(value)
                if len(rendered) > max_value_len:
                    rendered = rendered[:max_value_len] + "…"
                fields.append(f"{key}={rendered}")
            chunks.append({
                "table": table_name,
                "idx": row_idx,
                "text": f"[table={table_name} idx={row_idx}] " + " ; ".join(fields),
            })
    return chunks

# -----------------------------
# Super-simple keyword retriever
# -----------------------------
def _tokenize(s: str) -> List[str]:
    return re.findall(r"[A-Za-z0-9_]+", s.lower())

def score_doc(query: str, doc_text: str) -> float:
    """
    Lightweight relevance score: the number of document tokens that appear
    in the query, divided by log2 of the document length (mild length
    normalization, loosely BM25-flavored).
    """
    # Inline tokenizer: lowercase word tokens of letters/digits/underscore.
    doc_tokens = re.findall(r"[A-Za-z0-9_]+", doc_text.lower())
    if not doc_tokens:
        return 0.0
    query_vocab = set(re.findall(r"[A-Za-z0-9_]+", query.lower()))
    hits = sum(tok in query_vocab for tok in doc_tokens)
    return hits / math.log2(len(doc_tokens) + 2)

def retrieve_top_k(query: str, corpus: List[Dict[str, Any]], k: int = 10, per_table_cap: int = 5) -> List[Dict[str, Any]]:
    """
    Return up to *k* corpus entries ranked by score_doc, keeping at most
    *per_table_cap* entries from any single table so one table cannot
    flood the context. Falls back to the top-ranked entries when nothing
    scores positive.
    """
    ranked = sorted(
        ((score_doc(query, entry["text"]), entry) for entry in corpus),
        key=lambda pair: pair[0],
        reverse=True,
    )
    picked = []
    per_table = {}
    for score, entry in ranked:
        if score <= 0:
            continue
        table = entry.get("table", "unknown")
        used = per_table.get(table, 0)
        if used >= per_table_cap:
            continue
        picked.append(entry)
        per_table[table] = used + 1
        if len(picked) == k:
            break
    # Nothing scored positive: return the first k by rank regardless.
    if not picked:
        picked = [entry for _, entry in ranked[:k]]
    return picked

# -----------------------------
# Compose prompt for Together model
# -----------------------------
def build_prompt(query: str, passages: List[Dict[str, Any]]) -> str:
    """Assemble the grounded-QA prompt: the user question plus retrieved JSON snippets."""
    context = "\n\n".join(p["text"] for p in passages)
    return f"""You are a strict JSON-knowledge assistant. Answer ONLY using the provided context from the JSON export.
If the answer is not present, say you could not find it in the JSON.

# User question
{query}

# Context (JSON-derived snippets)
{context}

# Instructions
- Cite table names and ids if helpful (e.g., table=admission_acceptance_lists idx=12).
- Do not invent any data that is not in the context."""

# -----------------------------
# Together client helper
# -----------------------------
def call_together(api_key: str, prompt: str) -> str:
    """Send *prompt* to Together's Exaone 3.5 32B chat model and return the reply text."""
    key = (api_key or "").strip()
    if not key:
        return "⚠️ Please enter your Together API key."
    # Also export the key so any SDK code path that reads the env finds it.
    os.environ["TOGETHER_API_KEY"] = key
    response = Together(api_key=key).chat.completions.create(
        model="lgai/exaone-3-5-32b-instruct",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.2,
    )
    return response.choices[0].message.content

# -----------------------------
# Gradio App
# -----------------------------
with gr.Blocks(title="JSON Chatbot (Together)") as demo:
    # App header / usage blurb.
    gr.Markdown("## πŸ“š JSON Chatbot on Your Dump (Together Exaone 3.5 32B)\nUpload your JSON export and ask questions. The app safely loads imperfect JSON and retrieves the most relevant rows to answer your query.")

    # Credentials and retrieval breadth.
    with gr.Row():
        api_key_tb = gr.Textbox(label="Together API Key", type="password", placeholder="Paste your TOGETHER_API_KEY here")
        topk_slider = gr.Slider(3, 20, value=10, step=1, label="Top-K JSON Passages")

    # Data source: an uploaded file takes precedence over the disk path.
    with gr.Row():
        json_file = gr.File(label="Upload JSON export (e.g., phpMyAdmin export)", file_count="single", file_types=[".json"])
        fallback_path = gr.Textbox(label="Or fixed path on disk (optional)", placeholder="e.g., sultanbr_innovativeskills.json")

    # Retrieval tuning knobs.
    with gr.Accordion("Advanced", open=False):
        per_table_cap = gr.Slider(1, 10, value=5, step=1, label="Max passages per table")
        max_val_len = gr.Slider(100, 2000, value=500, step=50, label="Max value length per field (truncation)")

    status = gr.Markdown("")
    chatbot = gr.Chatbot(height=420)
    user_box = gr.Textbox(label="Ask something about the JSON...", placeholder="e.g., What are the admission criteria?")
    clear_btn = gr.Button("Clear", variant="secondary")

    # States
    state_corpus = gr.State([])   # list of {"table","idx","text"}
    state_docs = gr.State([])     # raw list of parsed json objects

    def load_json_to_corpus(file_obj, path_text, max_value_len):
        """
        Read JSON from the uploaded file (preferred) or from a disk path
        (fallback), then build the retrieval corpus.
        Returns (status_text, corpus, docs).
        """
        try:
            # Pick the source path: upload wins over the typed-in path.
            if file_obj is not None:
                source = file_obj.name
            else:
                source = (path_text or "").strip()
                if not source:
                    return ("⚠️ Please upload a JSON file or provide a valid path.", [], [])
            with open(source, "r", encoding="utf-8", errors="replace") as f:
                raw = f.read()

            docs = safe_load_phpmyadmin_like_json(raw)
            # Some exports might be a single object β€” normalize to list
            if not isinstance(docs, list):
                docs = [docs]

            corpus = flatten_json_to_corpus(docs, max_value_len=int(max_value_len))
            return (f"βœ… Loaded {len(docs)} top-level objects; built {len(corpus)} passages.", corpus, docs)

        except Exception as e:
            return (f"❌ Load error: {e}", [], [])

    def ask(api_key, query, history, corpus, k, cap):
        """Retrieve relevant passages for *query*, call the model, and append the exchange to the chat history."""
        if not corpus:
            return history + [[query, "⚠️ Please upload/load the JSON first."]]
        if not (query and query.strip()):
            return history + [["", "⚠️ Please enter a question."]]

        # Ground the model on the best-matching JSON snippets.
        passages = retrieve_top_k(query, corpus, k=int(k), per_table_cap=int(cap))
        try:
            answer = call_together(api_key, build_prompt(query, passages))
        except Exception as e:
            answer = f"❌ API error: {e}"

        return history + [[query, answer]]

    # Wire events
    # Rebuild the corpus whenever a file is uploaded or the fallback path changes.
    json_file.upload(
        load_json_to_corpus,
        inputs=[json_file, fallback_path, max_val_len],
        outputs=[status, state_corpus, state_docs],
    )
    fallback_path.change(
        load_json_to_corpus,
        inputs=[json_file, fallback_path, max_val_len],
        outputs=[status, state_corpus, state_docs],
    )

    # Enter in the question box runs retrieval + generation and updates the chat.
    user_box.submit(
        ask,
        inputs=[api_key_tb, user_box, chatbot, state_corpus, topk_slider, per_table_cap],
        outputs=[chatbot],
    )

    # Clear resets the chat, the input box, and the status banner.
    clear_btn.click(lambda: ([], "", "πŸ”„ Ready. Upload JSON or set a path, then ask a question."),
                    inputs=[],
                    outputs=[chatbot, user_box, status])

if __name__ == "__main__":
    demo.launch()