Delete tokenizer.json
tokenizer.json DELETED (+0 -11)
@@ -1,11 +0,0 @@
-
-
-
-# Save the tokenizer after training:
-from tensorflow.keras.preprocessing.text import tokenizer_from_json
-import json
-
-with open("tokenizer.json", "w") as f:
-    json.dump(tokenizer.to_json(), f)
-
-# Upload this file (tokenizer.json) to Hugging Face Spaces.
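
For reference, the deleted snippet imports tokenizer_from_json but never uses it, and passes the string returned by tokenizer.to_json() (which is already JSON) to json.dump, double-encoding it. Below is a minimal sketch of saving and reloading a Keras Tokenizer as tokenizer.json; the toy fit_on_texts call is only a stand-in for whatever tokenizer the Space actually trains.

from tensorflow.keras.preprocessing.text import Tokenizer, tokenizer_from_json

# Stand-in for the trained tokenizer (assumption: any fitted Keras Tokenizer works the same way).
tokenizer = Tokenizer(num_words=1000)
tokenizer.fit_on_texts(["hello world", "hello spaces"])

# Save: to_json() already returns a JSON string, so write it out directly.
with open("tokenizer.json", "w") as f:
    f.write(tokenizer.to_json())

# Reload (e.g. inside the Space after uploading tokenizer.json).
with open("tokenizer.json") as f:
    restored = tokenizer_from_json(f.read())

print(restored.texts_to_sequences(["hello world"]))

If a pretty-printed JSON file is preferred over the raw string, json.dump(json.loads(tokenizer.to_json()), f, indent=2) would also work.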