# handler.py: custom inference handler for zemuwen/14B_lora_ze
from typing import Any, Dict

from transformers import pipeline


class EndpointHandler:
    def __init__(self, path: str = "zemuwen/14B_lora_ze"):
        # Load the question-answering model from the repository as a transformers pipeline.
        self.pipeline = pipeline("question-answering", model=path)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        # The request payload is expected to carry "question" and "context" keys.
        question = data.get("question", "")
        context = data.get("context", "")
        # Run the question-answering pipeline on the inputs.
        results = self.pipeline(question=question, context=context)
        # Return the pipeline output: a dict with "answer", "score", "start", and "end".
        return results
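

if __name__ == "__main__":
    # Minimal local smoke test, a sketch rather than part of the Inference
    # Endpoints request contract. It assumes the default model repo
    # "zemuwen/14B_lora_ze" can be loaded as a question-answering pipeline and
    # that the payload carries top-level "question" and "context" keys, as
    # read in __call__ above.
    handler = EndpointHandler()
    sample_payload = {
        "question": "What task does this handler perform?",
        "context": "The handler wraps a question-answering model hosted on the Hugging Face Hub.",
    }
    prediction = handler(sample_payload)
    # Expected output shape: {"answer": ..., "score": ..., "start": ..., "end": ...}
    print(prediction)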