qgallouedec HF Staff committed on
Commit
476e2e0
·
verified ·
1 Parent(s): e1f4b05

Upload Gemma3ForConditionalGeneration

Browse files
Files changed (3) hide show
  1. config.json +3 -3
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -38,18 +38,18 @@
38
  "vocab_size": 268559
39
  },
40
  "torch_dtype": "float32",
41
- "transformers_version": "4.54.0.dev0",
42
  "vision_config": {
43
  "attention_dropout": 0.0,
44
  "hidden_act": "gelu_pytorch_tanh",
45
  "hidden_size": 16,
46
- "image_size": 896,
47
  "intermediate_size": 32,
48
  "layer_norm_eps": 1e-06,
49
  "model_type": "siglip_vision_model",
50
  "num_attention_heads": 4,
51
  "num_channels": 3,
52
  "num_hidden_layers": 2,
53
- "patch_size": 14
54
  }
55
  }
 
38
  "vocab_size": 268559
39
  },
40
  "torch_dtype": "float32",
41
+ "transformers_version": "4.55.0.dev0",
42
  "vision_config": {
43
  "attention_dropout": 0.0,
44
  "hidden_act": "gelu_pytorch_tanh",
45
  "hidden_size": 16,
46
+ "image_size": 224,
47
  "intermediate_size": 32,
48
  "layer_norm_eps": 1e-06,
49
  "model_type": "siglip_vision_model",
50
  "num_attention_heads": 4,
51
  "num_channels": 3,
52
  "num_hidden_layers": 2,
53
+ "patch_size": 16
54
  }
55
  }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
3
  "bos_token_id": 2,
4
  "eos_token_id": 1,
5
  "pad_token_id": 0,
6
- "transformers_version": "4.54.0.dev0"
7
  }
 
3
  "bos_token_id": 2,
4
  "eos_token_id": 1,
5
  "pad_token_id": 0,
6
+ "transformers_version": "4.55.0.dev0"
7
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b27476761998448dcce165bda769422fb89dc175b424ea4d8126da587280fde8
3
- size 9137752
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5314d433f6ec1d6a23b0dbdbe2398dac5001f49de594534b874f0ca753026e28
3
+ size 8899664