Update README.md
README.md CHANGED
@@ -178,12 +178,12 @@ text="zul: Clear all items from the recent documents list"
 input_ids = tokenizer(text, return_tensors="pt", max_length=1024, truncation=True).to("cuda:0")
 with torch.no_grad():
     generated_ids = model.generate(**input_ids, num_beams=5, max_new_tokens=len(text), do_sample=True, temperature=0.6, top_p=0.9)
-print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True, skip_prompt=True)[0])
+print("Toucan-base - translation:", tokenizer.batch_decode(generated_ids, skip_special_tokens=True, skip_prompt=True)[0])
 
 ```
 Output:
 ```bash
-Vala zonke izinto kusuka kwihlu lamadokhumende elidlule
+Toucan-base - translation: Vala zonke izinto kusuka kwihlu lamadokhumende elidlule
 ```
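For reference, the hunk above shows only the generation and decoding step from the README. A minimal, self-contained sketch of the surrounding setup is given below; the checkpoint name `UBC-NLP/toucan-base`, the `AutoModelForSeq2SeqLM` class, and the `cuda:0` device are assumptions for illustration, not part of this diff.

```python
# Hypothetical setup around the snippet changed in this commit.
# Checkpoint name, model class, and device are assumptions, not taken from the README.
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "UBC-NLP/toucan-base"  # assumed checkpoint; use the one referenced in the README
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id).to("cuda:0")

# Source sentence prefixed with the target-language tag (Zulu), as in the hunk header.
text = "zul: Clear all items from the recent documents list"

input_ids = tokenizer(text, return_tensors="pt", max_length=1024, truncation=True).to("cuda:0")
with torch.no_grad():
    generated_ids = model.generate(
        **input_ids,
        num_beams=5,
        max_new_tokens=len(text),
        do_sample=True,
        temperature=0.6,
        top_p=0.9,
    )

# The line this commit touches: the decoded translation is now printed with a label.
print("Toucan-base - translation:",
      tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0])
```

With the labeled print call, the script's output matches the updated `Output:` block in the diff.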