import json
import gradio as gr
from together import Together

# Load your JSON file
with open("sultanbr_innovativeskills.json", "r", encoding="utf-8") as f:
    json_data = json.load(f)

# Serialize the JSON into a single string used as context in every prompt
context = json.dumps(json_data, indent=2)

# Chat function: answer a user question using the JSON data as the only context
def chat_with_json(api_key, user_message, history):
    if not api_key:
        return history + [[user_message, "⚠️ Please enter your Together API key first."]]
    try:
        client = Together(api_key=api_key)
        # Build a single user prompt that combines the JSON context and the question
        prompt = f"""You are an assistant that answers questions based on the following JSON data:
{context}
User question: {user_message}
Answer clearly using only the relevant JSON information.
"""
        response = client.chat.completions.create(
            model="lgai/exaone-3-5-32b-instruct",
            messages=[{"role": "user", "content": prompt}]
        )
        bot_reply = response.choices[0].message.content
        history.append([user_message, bot_reply])
        return history
    except Exception as e:
        return history + [[user_message, f"⚠️ Error: {str(e)}"]]

# Build Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## 📚 JSON Chatbot (Powered by Together API)")
    api_key = gr.Textbox(label="Enter Together API Key", type="password")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Ask something...")
    clear = gr.Button("Clear Chat")
    state = gr.State([])  # conversation history shared across callbacks

    # Reorder arguments so the Gradio inputs match chat_with_json's signature
    def respond(user_message, chat_history, api_key):
        return chat_with_json(api_key, user_message, chat_history)

    # On submit: update the history state, then mirror it into the chatbot and clear the textbox
    msg.submit(respond, [msg, state, api_key], state, queue=False).then(
        lambda h: (h, ""), state, [chatbot, msg]
    )
    # Clear both the stored history and the visible chat
    clear.click(lambda: [], None, state).then(lambda: [], None, chatbot)

# Launch app
if __name__ == "__main__":
    demo.launch()
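
# Usage sketch (assumptions: this script is saved as app.py, the JSON file sits next
# to it, and the dependencies are installed from PyPI as "gradio" and "together"):
#
#   pip install gradio together
#   python app.py
#
# Gradio prints a local URL (http://127.0.0.1:7860 by default); open it, paste a
# Together API key into the password box, and ask questions about the JSON data.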