# Amharic-LLM / app.py
# Gradio app serving redietmolla/amharic_qa_fine_tuned_llama_model
# (original upload note: "Update app.py", commit 6443186)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# Hub repo id of the fine-tuned Amharic QA model.
model_name = "redietmolla/amharic_qa_fine_tuned_llama_model"

# Pull the tokenizer and causal-LM weights from the Hugging Face Hub
# (downloads on first run, then served from the local cache).
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Cache for the text-generation pipeline: built lazily on the first request
# and reused afterwards.  The original code rebuilt the pipeline on every
# call, paying the full wrapper-construction cost per question.
_generator = None


def qa_pipeline(question):
    """Generate an answer for *question* with the fine-tuned Amharic model.

    Args:
        question: The user's question as plain text.

    Returns:
        The model's generated text (note: includes the prompt wrapper, as
        in the original implementation — callers/UI receive the raw
        ``generated_text`` field).
    """
    global _generator
    if _generator is None:
        # Build once; `model` and `tokenizer` are loaded at module level.
        _generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
    # Llama-style instruction wrapping expected by the fine-tuned model.
    result = _generator(f"<s>[INST] {question} [/INST]")
    return result[0]['generated_text']
# Multi-line input box for the user's question.
question_box = gr.Textbox(lines=2, placeholder="Enter your question here...")

# Wire the QA function into a simple Gradio text-in / text-out UI.
iface = gr.Interface(
    fn=qa_pipeline,
    inputs=question_box,
    outputs="text",
    title="Amharic QA Model",
    description="Ask questions and get answers based on the Amharic context.",
)

# Start the web server (blocks until the app is stopped).
iface.launch()