Update app.py
app.py CHANGED
@@ -13,7 +13,7 @@ from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 device = "cuda" if torch.cuda.is_available() else "cpu"  # ✅ Use GPU if available
 
 # Load fine-tuned GPT-2 model
-model_path = "cpv2280/
+model_path = "cpv2280/gpt2-tinystories-generator"  # Update if needed
 model = AutoModelForCausalLM.from_pretrained(model_path)
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 
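For context, below is a minimal sketch of how the loaded model and tokenizer are typically used for generation in a Space like this. Only the device selection, the model path, and the from_pretrained calls come from the diff above; the generate_story helper, its sampling parameters, and the example prompt are illustrative assumptions, not part of this commit.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"  # ✅ Use GPU if available

model_path = "cpv2280/gpt2-tinystories-generator"  # repo id from the commit above
model = AutoModelForCausalLM.from_pretrained(model_path).to(device)
tokenizer = AutoTokenizer.from_pretrained(model_path)

def generate_story(prompt: str, max_new_tokens: int = 120) -> str:
    # Tokenize the prompt and move the tensors to the same device as the model
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    # Sampling parameters below are placeholders, not values taken from app.py
    output_ids = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        top_p=0.95,
        temperature=0.8,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

print(generate_story("Once upon a time, a little robot"))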