Upload 2 files
- Pediatric_cleaned.txt +0 -0
- app.py +38 -0
Pediatric_cleaned.txt
ADDED
The diff for this file is too large to render.
app.py
ADDED
@@ -0,0 +1,38 @@
+
+import gradio as gr
+from medical_chatbot import ColabBioGPTChatbot
+
+# Instantiate and auto-load the medical data
+chatbot = ColabBioGPTChatbot(use_gpu=True, use_8bit=True)
+
+medical_file_uploaded = False
+
+try:
+    chatbot.load_medical_data("Pediatric_cleaned.txt")
+    medical_file_uploaded = True
+    startup_status = "✅ Medical file 'Pediatric_cleaned.txt' loaded at startup. Ready to chat!"
+except Exception as e:
+    startup_status = f"❌ Failed to load 'Pediatric_cleaned.txt': {str(e)}"
+
+def generate_response(user_input):
+    if not medical_file_uploaded:
+        return "⚠️ Medical data failed to load. Please check the file and restart the app."
+    return chatbot.chat(user_input)
+
+with gr.Blocks() as demo:
+    gr.Markdown("## 🩺 Pediatric Medical Assistant")
+    gr.Markdown(startup_status)
+
+    chatbot_ui = gr.Chatbot(label="🧠 Chat History")
+    user_input = gr.Textbox(placeholder="Ask a pediatric health question...", lines=2, show_label=False)
+    submit_btn = gr.Button("Send")
+
+    def on_submit(user_message, chat_history):
+        bot_response = generate_response(user_message)
+        chat_history.append((user_message, bot_response))
+        return "", chat_history
+
+    user_input.submit(fn=on_submit, inputs=[user_input, chatbot_ui], outputs=[user_input, chatbot_ui])
+    submit_btn.click(fn=on_submit, inputs=[user_input, chatbot_ui], outputs=[user_input, chatbot_ui])
+
+demo.launch(share=True)
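Note: app.py imports ColabBioGPTChatbot from a local medical_chatbot module that is not included in this commit. Below is a minimal sketch of the interface the app appears to assume, inferred only from the calls above (the constructor flags, load_medical_data, and chat); the class bodies are hypothetical placeholders, not the module's actual BioGPT-backed implementation.

# Hypothetical sketch of what medical_chatbot.py must expose for app.py to run.
# Method names and signatures are taken from the calls in app.py; the bodies
# here are placeholders, not the real model-loading / retrieval logic.

class ColabBioGPTChatbot:
    def __init__(self, use_gpu: bool = True, use_8bit: bool = True):
        # The real class presumably loads a BioGPT model here,
        # optionally on GPU and with 8-bit quantization.
        self.use_gpu = use_gpu
        self.use_8bit = use_8bit
        self.documents: list[str] = []

    def load_medical_data(self, path: str) -> None:
        # app.py calls this with "Pediatric_cleaned.txt"; the real version
        # likely chunks and indexes the text for retrieval.
        with open(path, "r", encoding="utf-8") as f:
            self.documents = [line.strip() for line in f if line.strip()]

    def chat(self, user_input: str) -> str:
        # Placeholder reply; the real method would retrieve relevant passages
        # and generate an answer with the language model.
        return f"(demo) received question: {user_input}"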