import torch
import gradio as gr
from transformers import pipeline

# Use the Hugging Face model pipeline
# Extractive question-answering model, built once at import time and shared by
# get_answer() below. NOTE: downloads the model weights on first run.
question_answer = pipeline("question-answering", model="deepset/roberta-base-squad2")

def read_file_content(file_obj):
    """
    Read the uploaded file and return its stripped text content.

    Accepts either a plain path string or an object exposing a ``.name``
    path attribute (older Gradio versions pass a tempfile wrapper, newer
    ones may pass the path string directly).

    Returns:
        The file's text with surrounding whitespace stripped, or an
        ``"An error occurred: ..."`` message string on failure — callers
        detect errors via that prefix.
    """
    try:
        # Resolve the path inside the try so a missing/None argument is
        # reported as an error string instead of raising.
        path = file_obj if isinstance(file_obj, str) else file_obj.name
        with open(path, 'r', encoding='utf-8') as file:
            return file.read().strip()
    except Exception as e:
        # Broad catch is deliberate: errors are returned as strings so the
        # Gradio UI can display them instead of crashing.
        return f"An error occurred: {e}"

def get_answer(file, question):
    """
    Answer *question* using the text of the uploaded *file* as context.

    Returns:
        The model's answer string on success; otherwise a user-facing
        message (file-read error, empty-question prompt, or QA error).
        All failures are returned as strings so the Gradio UI displays
        them instead of crashing.
    """
    context = read_file_content(file)
    # read_file_content signals failure via an error-message prefix.
    if isinstance(context, str) and context.startswith("An error occurred"):
        return context  # Return file reading error

    # Guard against None as well: Gradio can pass None for an empty textbox,
    # and calling .strip() on it would raise AttributeError.
    if not question or not question.strip():
        return "Please provide a valid question."

    try:
        answer = question_answer(question=question, context=context)
        return answer["answer"]
    except Exception as e:
        return f"An error occurred during question answering: {e}"

# Wire up the Gradio UI: a file upload and a question box feeding get_answer,
# with a single textbox for the answer.
file_input = gr.File(label="Upload your file")
question_input = gr.Textbox(label="Input your question", lines=1)
answer_output = gr.Textbox(label="Answer text", lines=1)

demo = gr.Interface(
    fn=get_answer,
    inputs=[file_input, question_input],
    outputs=[answer_output],
    title="@GenAILearniverse Project 5: Document Q & A",
    description="This application answers questions based on the uploaded context file.",
)

demo.launch()