File size: 932 Bytes
92873de
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
These checkpoints were serialized with the following script:

```python
import torch 
from transformers import CLIPTextConfig, CLIPTextModelWithProjection

def get_dummy_components():
    """Build two tiny, deterministic CLIP text encoders for testing.

    Both encoders share one minimal ``CLIPTextConfig`` and are initialized
    under the same RNG seed, so their weights are identical and reproducible.

    Returns:
        tuple: ``(text_encoder, text_encoder_2)``, two
        ``CLIPTextModelWithProjection`` instances.
    """
    config = CLIPTextConfig(
        bos_token_id=0,
        eos_token_id=2,
        hidden_size=32,
        intermediate_size=37,
        layer_norm_eps=1e-05,
        num_attention_heads=4,
        num_hidden_layers=5,
        pad_token_id=1,
        vocab_size=1000,
        hidden_act="gelu",
        projection_dim=32,
    )

    encoders = []
    for _ in range(2):
        # Re-seed before each construction so both models get identical weights.
        torch.manual_seed(0)
        encoders.append(CLIPTextModelWithProjection(config))

    return tuple(encoders)


# Build the two deterministic tiny encoders and upload them to the Hub
# under the hf-internal-testing org for use as test fixtures.
# NOTE: push_to_hub performs network I/O and requires Hub write credentials.
text_encoder, text_encoder_2 = get_dummy_components()
text_encoder.push_to_hub("hf-internal-testing/tiny-sd3-text_encoder")
text_encoder_2.push_to_hub("hf-internal-testing/tiny-sd3-text_encoder-2")
```