hgdgng committed on
Commit
f02b4db
·
verified ·
1 Parent(s): e4c2114

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -3
app.py CHANGED
@@ -1,18 +1,26 @@
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
- # Load the pre-trained question-answering model from Hugging Face
5
- qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2")
 
 
 
 
 
 
6
 
7
  # Define the function that takes inputs and returns the answer
8
  def answer_question(context, question):
 
 
9
  result = qa_pipeline(question=question, context=context)
10
  return result['answer']
11
 
12
  # Create the Gradio interface
13
  interface = gr.Interface(
14
  fn=answer_question,
15
- inputs=[gr.inputs.Textbox(lines=7, label="Context (Enter the passage)"), gr.inputs.Textbox(lines=2, label="Question")],
16
  outputs="text",
17
  title="Question Answering Model",
18
  description="Ask a question based on the given context.",
 
1
  import gradio as gr
2
  from transformers import pipeline
3
 
4
def _load_qa_pipeline():
    """Best-effort loader for the extractive QA model.

    Returns the Hugging Face question-answering pipeline on success, or
    None when model download/initialisation fails, so the app can start
    and surface the error in the UI instead of crashing at import time.
    """
    try:
        return pipeline("question-answering", model="distilbert-base-uncased-distilled-squad")
    except Exception as e:
        # Print error message for debugging purposes
        print(f"Error loading model: {e}")
        return None

# Module-level handle used by answer_question(); None signals load failure.
qa_pipeline = _load_qa_pipeline()
 
13
def answer_question(context, question):
    """Extract the answer span for *question* from *context*.

    Parameters
    ----------
    context : str
        The passage the model searches for an answer.
    question : str
        The question to answer from the passage.

    Returns
    -------
    str
        The extracted answer text, or an error message when the model
        failed to load at startup.
    """
    # Guard: the loader leaves qa_pipeline as None when model init failed.
    if qa_pipeline is None:
        return "Error: Model not loaded."
    prediction = qa_pipeline(question=question, context=context)
    return prediction['answer']
19
 
20
  # Create the Gradio interface
21
  interface = gr.Interface(
22
  fn=answer_question,
23
+ inputs=[gr.Textbox(lines=7, label="Context (Enter the passage)"), gr.Textbox(lines=2, label="Question")],
24
  outputs="text",
25
  title="Question Answering Model",
26
  description="Ask a question based on the given context.",