reab5555 committed on
Commit
d6bf13f
·
verified ·
1 Parent(s): eea7ec4

Update processing.py

Browse files
Files changed (1) hide show
  1. processing.py +1 -1
processing.py CHANGED
@@ -93,7 +93,7 @@ class SequentialAnalyzer:
93
 
94
  input_tokens = len(tokenizer.encode(content))
95
 
96
- max_input_length = 2048
97
  encoded_input = tokenizer.encode(content, truncation=True, max_length=max_input_length)
98
  truncated_content = tokenizer.decode(encoded_input)
99
 
 
93
 
94
  input_tokens = len(tokenizer.encode(content))
95
 
96
+ max_input_length = 1548
97
  encoded_input = tokenizer.encode(content, truncation=True, max_length=max_input_length)
98
  truncated_content = tokenizer.decode(encoded_input)
99