SmilingTree committed on
Commit 39c629a · verified · 1 Parent(s): e9601e9

Create app.py

Files changed (1)
  1. app.py +43 -0
app.py ADDED
@@ -0,0 +1,43 @@
+ import os
+ import gradio as gr
+ from huggingface_hub import InferenceClient
+
+ # Get your Hugging Face token from environment variables (set this in your Space's secrets)
+ HF_TOKEN = os.getenv("HF_TOKEN")
+
+ # Initialize the inference client
+ client = InferenceClient(
+     provider="hf-inference",
+     api_key=HF_TOKEN,
+ )
+
+ # Define the function to use in Gradio
+ def answer_question(question, context):
+     if not HF_TOKEN:
+         return "HF_TOKEN not found. Please set it in the environment variables."
+
+     try:
+         result = client.question_answering(
+             question=question,
+             context=context,
+             model="deepset/roberta-base-squad2",
+         )
+         return result["answer"]
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ # Gradio Interface
+ iface = gr.Interface(
+     fn=answer_question,
+     inputs=[
+         gr.Textbox(label="Question"),
+         gr.Textbox(label="Context", lines=5),
+     ],
+     outputs="text",
+     title="Question Answering with RoBERTa",
+     description="Enter a question and context. The model will find the answer from the context.",
+ )
+
+ # Launch for Hugging Face Space
+ if __name__ == "__main__":
+     iface.launch()
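
A quick way to sanity-check the committed app before pushing it to the Space is to run the same inference call locally. The snippet below is a minimal sketch, not part of the commit; it assumes HF_TOKEN is exported in your shell and that deepset/roberta-base-squad2 is reachable via the hf-inference provider. Because some huggingface_hub versions return a dict and newer ones return a dataclass, it prints the whole result instead of indexing into it.

import os
from huggingface_hub import InferenceClient

# Smoke test for the same call app.py makes (assumes HF_TOKEN is set in the environment).
client = InferenceClient(provider="hf-inference", api_key=os.getenv("HF_TOKEN"))

result = client.question_answering(
    question="Where do I live?",
    context="My name is Sarah and I live in London.",
    model="deepset/roberta-base-squad2",
)
# Older huggingface_hub versions return a dict; newer ones return a dataclass with an .answer field.
print(result)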