import gradio as gr
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

# Load the model and tokenizer from an explicit local path
model = AutoModelForQuestionAnswering.from_pretrained("./model")
tokenizer = AutoTokenizer.from_pretrained("./model")
qa_pipeline = pipeline("question-answering", model=model, tokenizer=tokenizer)

def answer_question(question):
    # Read the reference context from data.txt on every request
    with open("data.txt", encoding="utf-8") as f:
        context = f.read()
    result = qa_pipeline(question=question, context=context)
    return result["answer"]

gr.Interface(
    fn=answer_question,
    inputs="text",
    outputs="text",
    title="Қазақша Ассистент",
).launch()
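Once the script is running, the app can also be queried from another process with the gradio_client package. This is only a minimal sketch: it assumes the app is reachable on Gradio's default local address and port (127.0.0.1:7860), and the sample question is purely illustrative.

    from gradio_client import Client

    # Connect to the locally running Gradio app (default port is an assumption)
    client = Client("http://127.0.0.1:7860/")

    # A single-function gr.Interface exposes its endpoint as "/predict"
    answer = client.predict("Қазақстанның астанасы қай қала?", api_name="/predict")
    print(answer)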