BINAII committed · Commit 087160e (verified) · 1 Parent(s): 8224c6c

Update app.py

Files changed (1):
  1. app.py +8 -21
app.py CHANGED
@@ -1,29 +1,16 @@
  import gradio as gr
- from transformers import AutoModelForMaskedLM, AutoTokenizer, pipeline
+ from transformers import pipeline

- # Load ClinicalBERT model
  model_name = "emilyalsentzer/Bio_ClinicalBERT"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModelForMaskedLM.from_pretrained(model_name)
+ nlp_pipeline = pipeline("fill-mask", model=model_name)

- # Create a text generation pipeline
- nlp_pipeline = pipeline("fill-mask", model=model, tokenizer=tokenizer)
-
- # Function to interact with ClinicalBERT
  def medical_chatbot(user_input):
      if "[MASK]" not in user_input:
-         return "Please include '[MASK]' in your input. Example: 'The patient is showing signs of [MASK].'"
-
+         user_input += " [MASK]"
+
      response = nlp_pipeline(user_input)
-     return response[0]["sequence"]  # Returns the most likely sentence
-
- # Gradio UI
- interface = gr.Interface(
-     fn=medical_chatbot,
-     inputs=gr.Textbox(lines=2, placeholder="Enter medical query with [MASK]..."),
-     outputs="text",
-     title="Medical Chatbot",
-     description="Ask medical questions. Example: 'Patient shows symptoms of [MASK]'."
- )
+     return {"prediction": response[0]["sequence"], "confidence": response[0]["score"]}

- interface.launch()
+ # Create an API interface
+ interface = gr.Interface(fn=medical_chatbot, inputs="text", outputs="json")
+ interface.launch(share=True)
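
For context, below is a minimal client-side sketch of how the updated JSON endpoint might be queried once interface.launch(share=True) is running. It assumes the gradio_client package and Gradio's default /predict API route; the URL and route name are illustrative assumptions, not part of this commit.

# Hypothetical usage sketch (not part of the commit): query the running app.
from gradio_client import Client

# Point at the local server or the public share URL printed by launch(share=True).
client = Client("http://127.0.0.1:7860")

# "[MASK]" may be omitted; the updated medical_chatbot() appends it automatically.
result = client.predict(
    "The patient is showing signs of",
    api_name="/predict",  # default Gradio route name; adjust if the app exposes a different one
)

# With outputs="json", the result mirrors medical_chatbot()'s return value:
# {"prediction": "<most likely completed sentence>", "confidence": <float score>}
print(result)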