Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -105,7 +105,7 @@ class TweetDatasetProcessor:
|
|
105 |
"""
|
106 |
|
107 |
input_ids = self.tokenizer.encode(prompt, return_tensors='pt')
|
108 |
-
output = self.model.generate(input_ids, max_length=150,max_new_token = 300, num_return_sequences=1, temperature=1.0)
|
109 |
generated_tweet = self.tokenizer.decode(output[0], skip_special_tokens=True).strip()
|
110 |
|
111 |
return generated_tweet
|
|
|
105 |
"""
|
106 |
|
107 |
input_ids = self.tokenizer.encode(prompt, return_tensors='pt')
|
108 |
+
output = self.model.generate(input_ids, max_new_tokens=300, num_return_sequences=1, do_sample=True, top_k=50, pad_token_id=self.tokenizer.eos_token_id, temperature=1.0)
|
109 |
generated_tweet = self.tokenizer.decode(output[0], skip_special_tokens=True).strip()
|
110 |
|
111 |
return generated_tweet
|