gaur3009 committed
Commit b985953 · verified · 1 Parent(s): 5e827ce

Create llm.py

Files changed (1):
  1. llm.py  +10 -0
llm.py ADDED
@@ -0,0 +1,10 @@
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
+ model = AutoModelForCausalLM.from_pretrained("distilgpt2")
+
+ def generate_answer(context, question):
+     prompt = f"Context:\n{context}\n\nQuestion: {question}\nAnswer:"
+     inputs = tokenizer.encode(prompt, return_tensors='pt', max_length=1024, truncation=True)
+     outputs = model.generate(inputs, max_new_tokens=50, do_sample=True)
+     return tokenizer.decode(outputs[0], skip_special_tokens=True).strip()
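For reference, a minimal usage sketch of the new module. It assumes llm.py is on the import path; the context and question strings below are illustrative, not part of the commit.

    # usage_example.py (illustrative only)
    from llm import generate_answer

    # Hypothetical sample inputs; distilgpt2 is a small model, so answers are rough.
    context = "The Eiffel Tower is located in Paris and was completed in 1889."
    question = "Where is the Eiffel Tower located?"

    print(generate_answer(context, question))

Note that decoding outputs[0] returns the full sequence, prompt included, since generate() on a decoder-only model prepends the input tokens; callers who want only the continuation would need to slice off the prompt text themselves.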