malvin noel committed on
Commit 4cd4316 · 1 Parent(s): ac57303

change script

Files changed (1)
  1. scripts/generate_scripts.py +4 -3
scripts/generate_scripts.py CHANGED
@@ -14,9 +14,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 
 
 @spaces.GPU()
-def generate_local(model,tokenizer, prompt: str, max_new_tokens: int = 350, temperature: float = 0.7) -> str:
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")  # get the device the model is on
-    inputs = tokenizer(prompt, return_tensors="pt").to(device)
+def generate_local(model, tokenizer, prompt: str, max_new_tokens: int = 350, temperature: float = 0.7) -> str:
+    inputs = tokenizer(prompt, return_tensors="pt")
+    inputs = {k: v.to(model.device) for k, v in inputs.items()}  # ⬅️ Safely match model's device
+
     output_ids = model.generate(
         **inputs,
         max_new_tokens=max_new_tokens,
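
For context, a minimal sketch of how the updated generate_local might look in full after this change. Only the device-handling lines above come from the diff; the generation tail (do_sample, decoding of output_ids, stripping the prompt) is cut off by the hunk, so those parts are assumptions.

import torch
import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer


@spaces.GPU()
def generate_local(model, tokenizer, prompt: str, max_new_tokens: int = 350, temperature: float = 0.7) -> str:
    # Tokenize on CPU, then move each tensor to wherever the model actually lives.
    # Under ZeroGPU (@spaces.GPU), the model may only be placed on CUDA inside this
    # call, so reading model.device here is safer than deciding cuda/cpu up front.
    inputs = tokenizer(prompt, return_tensors="pt")
    inputs = {k: v.to(model.device) for k, v in inputs.items()}

    # Assumed completion of the function (not shown in the diff hunk).
    output_ids = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        do_sample=True,
    )
    # Decode only the newly generated tokens, skipping the echoed prompt.
    prompt_len = inputs["input_ids"].shape[1]
    return tokenizer.decode(output_ids[0][prompt_len:], skip_special_tokens=True)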