import gradio as gr
from transformers import pipeline

# Load QA pipelines (lightweight, free models)
qa_model_1 = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")
qa_model_2 = pipeline("question-answering", model="deepset/tinyroberta-squad2")
# NOTE: plain bert-base-uncased ships no trained QA head, so a SQuAD-fine-tuned BERT base is used instead
qa_model_3 = pipeline("question-answering", model="deepset/bert-base-cased-squad2")


def answer_question(question, context, model_choice):
    if model_choice == "🤖 DistilBERT":
        return qa_model_1(question=question, context=context)["answer"]
    elif model_choice == "🧠 TinyRoBERTa":
        return qa_model_2(question=question, context=context)["answer"]
    elif model_choice == "📚 BERT Base":
        return qa_model_3(question=question, context=context)["answer"]
    return "Please choose a model."


with gr.Blocks() as demo:
    # Inject light orange background and dark orange bold heading via HTML
    gr.HTML("""
    <div style="background-color: #ffe5cc; padding: 16px; border-radius: 8px;">
        <h1 style="color: #cc5500; font-weight: bold; text-align: center;">
            Question Answering with Lightweight LLMs
        </h1>
    </div>
""") with gr.Row(): with gr.Column(): question = gr.Textbox(label="Enter your question") context = gr.Textbox(label="Enter context or passage", lines=6) model_choice = gr.Radio(["🤖 DistilBERT", "🧠 TinyRoBERTa", "📚 BERT Base"], label="Choose a model") button = gr.Button("Get Answer") with gr.Column(): output = gr.Textbox(label="Answer", lines=3) button.click(fn=answer_question, inputs=[question, context, model_choice], outputs=output) gr.HTML("") demo.launch()