from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "google/gemma-2-9b-it"

# Load the tokenizer and model weights from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_answers(query):
    # Tokenize the query, generate a completion, and decode it back to text
    input_ids = tokenizer(query, return_tensors="pt")
    output = model.generate(**input_ids)
    return tokenizer.decode(output[0])
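
# Minimal usage sketch (the query string below is illustrative, not from the original file):
if __name__ == "__main__":
    print(generate_answers("What is the capital of France?"))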