Androidonnxfork committed
Commit ac2165c · 1 Parent(s): ff4ede5

Delete folder /app/src/main/model/ with huggingface_hub
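The commit message says the folder was removed with the huggingface_hub client rather than through the web UI. A minimal sketch of how such a deletion can be issued, assuming a placeholder Space id (the exact repo id is not shown on this page):

# Sketch only: delete a folder from a Space via huggingface_hub.
# "Androidonnxfork/some-space" is a placeholder repo id, not taken from this page.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token stored by `huggingface-cli login` by default
api.delete_folder(
    path_in_repo="app/src/main/model",
    repo_id="Androidonnxfork/some-space",
    repo_type="space",
    commit_message="Delete folder /app/src/main/model/ with huggingface_hub",
)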
app/src/main/model/text_encoder/model.ort DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3e81a168e1aa45501682adf0d9369f982272e8c438e591d7340c96191d2b85bd
- size 124370984
 
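The model.ort and model.onnx entries in this diff are Git LFS pointer files, so only the pointer metadata is shown (spec version, SHA-256 of the blob, and byte size), not the binary weights themselves. A small sketch of checking a locally downloaded copy against those pointer fields; the local file name is an assumption:

# Sketch: verify a downloaded blob against the oid/size recorded in its LFS pointer.
import hashlib

def verify_lfs_blob(path, expected_sha256, expected_size):
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return size == expected_size and h.hexdigest() == expected_sha256

# Values copied from the text_encoder/model.ort pointer above; "model.ort" is a local path assumption.
ok = verify_lfs_blob(
    "model.ort",
    "3e81a168e1aa45501682adf0d9369f982272e8c438e591d7340c96191d2b85bd",
    124370984,
)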
app/src/main/model/tokenizer/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
app/src/main/model/tokenizer/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
- {
- "bos_token": {
- "content": "<|startoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": "<|endoftext|>",
- "unk_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- }
- }
 
app/src/main/model/tokenizer/tokenizer_config.json DELETED
@@ -1,33 +0,0 @@
- {
- "add_prefix_space": false,
- "bos_token": {
- "__type": "AddedToken",
- "content": "<|startoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "clean_up_tokenization_spaces": true,
- "do_lower_case": true,
- "eos_token": {
- "__type": "AddedToken",
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- },
- "errors": "replace",
- "model_max_length": 77,
- "pad_token": "<|endoftext|>",
- "tokenizer_class": "CLIPTokenizer",
- "unk_token": {
- "__type": "AddedToken",
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": true,
- "rstrip": false,
- "single_word": false
- }
- }
 
app/src/main/model/tokenizer/vocab.json DELETED
The diff for this file is too large to render. See raw diff
 
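The deleted merges.txt, special_tokens_map.json, tokenizer_config.json, and vocab.json together form a CLIP tokenizer (tokenizer_class CLIPTokenizer, model_max_length 77). Before this commit the folder could be loaded directly with transformers; a sketch, assuming a local checkout with the old layout:

# Sketch: load the (now deleted) tokenizer folder with transformers.
# The local path mirrors the repo layout before this commit.
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("app/src/main/model/tokenizer")
ids = tokenizer("a photo of a cat", padding="max_length", max_length=77).input_ids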
app/src/main/model/unet/model.ort DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:30fa6d7d226a101e174c5f1bae44921428142bdfb065a837a1d1c0e907a5be60
- size 874459584
 
app/src/main/model/vae_decoder/model.onnx DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:c1f75b8e3ffaf6746dcef1d3556831e94c2e31b78ca5df57d889f22d438a22ea
- size 49818164
 
app/src/main/model/vae_decoder/model.ort DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:83e6bad86d86ffab73c5e7d4cb54d03fab36bb2e7cb125c7dc624755bcd2b088
- size 49976208
 
app/src/main/model/vae_decoder/model.with_runtime_opt.ort DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:80713a0b4ebbcc9da2c4bacea40cd5830476d3c0ec00778335f00f5c18564926
- size 49985560
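The deleted text_encoder, unet, and vae_decoder models appear to be the usual Stable Diffusion inference components exported to ONNX/ORT format. A sketch of how such files are typically opened with onnxruntime; the paths mirror the old repo layout and the execution provider is an assumption:

# Sketch: open the ORT-format models with onnxruntime (paths reflect the deleted layout).
import onnxruntime as ort

providers = ["CPUExecutionProvider"]  # assumption; an Android build would more likely use NNAPI
text_encoder = ort.InferenceSession("app/src/main/model/text_encoder/model.ort", providers=providers)
unet = ort.InferenceSession("app/src/main/model/unet/model.ort", providers=providers)
vae_decoder = ort.InferenceSession("app/src/main/model/vae_decoder/model.ort", providers=providers)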