Spaces:
Sleeping
Sleeping
Yaron Koresh
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -571,7 +571,7 @@ def summarize(
|
|
571 |
for index in range(math.ceil( len(words) / 500 )):
|
572 |
|
573 |
chunk = " ".join(words[ index*500:(index+1)*500 ])
|
574 |
-
inputs = tokenizer.encode( prefix + chunk, return_tensors="pt", truncation=False)
|
575 |
|
576 |
while get_tensor_length(inputs) > max_len:
|
577 |
|
@@ -726,7 +726,7 @@ def translate(txt,to_lang="en",from_lang=False):
|
|
726 |
for index in range(math.ceil( len(words) / 500 )):
|
727 |
chunk = " ".join(words[index*500:(index+1)*500])
|
728 |
log(f'DBG translate chunk is {chunk}')
|
729 |
-
inputs = tokenizer.encode(prefix+chunk, return_tensors="pt", truncation=False)
|
730 |
gen = model.generate(inputs,num_beams=3)
|
731 |
toks = tokenizer.decode(gen[0], skip_special_tokens=True)
|
732 |
ret = ret + ("" if ret == "" else " ") + toks
|
|
|
571 |
for index in range(math.ceil( len(words) / 500 )):
|
572 |
|
573 |
chunk = " ".join(words[ index*500:(index+1)*500 ])
|
574 |
+
inputs = tokenizer.encode( prefix + chunk, return_tensors="pt", truncation=False, add_special_tokens=True)
|
575 |
|
576 |
while get_tensor_length(inputs) > max_len:
|
577 |
|
|
|
726 |
for index in range(math.ceil( len(words) / 500 )):
|
727 |
chunk = " ".join(words[index*500:(index+1)*500])
|
728 |
log(f'DBG translate chunk is {chunk}')
|
729 |
+
inputs = tokenizer.encode(prefix+chunk, return_tensors="pt", truncation=False, add_special_tokens=True)
|
730 |
gen = model.generate(inputs,num_beams=3)
|
731 |
toks = tokenizer.decode(gen[0], skip_special_tokens=True)
|
732 |
ret = ret + ("" if ret == "" else " ") + toks
|