import os

# Cache setup: point the Hugging Face cache at a writable directory.
# Set this before importing transformers so the new location is picked up.
os.environ['HF_HOME'] = '/tmp/cache'

from transformers import pipeline

model = pipeline(
    "text-generation",
    model="gpt2",
    device=-1  # Force CPU
)


def generate_text(prompt, max_new_tokens=560, max_context=1024):
    """Generate text with precise token control."""
    # GPT-2's context window is 1024 tokens, so truncate the prompt to leave
    # room for the requested new tokens. Passing both max_length and
    # max_new_tokens to generate would only raise a warning and ignore
    # max_length, so the prompt budget is enforced here instead.
    prompt_budget = min(max_context, 1024) - max_new_tokens
    input_ids = model.tokenizer(
        prompt, truncation=True, max_length=prompt_budget
    )["input_ids"]
    prompt = model.tokenizer.decode(input_ids)

    output = model(
        prompt,
        max_new_tokens=max_new_tokens,  # Response tokens (560 by default)
        pad_token_id=50256              # GPT-2's EOS token id; set explicitly to avoid padding warnings
    )
    return output[0]["generated_text"]
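

# Example usage (a minimal sketch; the prompt and token count below are
# illustrative assumptions, not part of the original Space):
if __name__ == "__main__":
    sample = generate_text("Once upon a time", max_new_tokens=50)
    print(sample)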