qgallouedec (HF Staff) committed
Commit 46a5905 · verified · 1 parent: 20e2c73

Upload LlavaNextForConditionalGeneration

Files changed (2):
  1. config.json (+22 -15)
  2. model.safetensors (+2 -2)
config.json CHANGED
@@ -2,6 +2,7 @@
   "architectures": [
     "LlavaNextForConditionalGeneration"
   ],
+  "ignore_index": -100,
   "image_grid_pinpoints": [
     [
       336,
@@ -30,29 +31,32 @@
   "multimodal_projector_bias": true,
   "projector_hidden_act": "gelu",
   "text_config": {
-    "attention_bias": false,
+    "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+    "architectures": [
+      "MistralForCausalLM"
+    ],
     "attention_dropout": 0.0,
-    "head_dim": 4,
+    "head_dim": null,
     "hidden_act": "silu",
     "hidden_size": 16,
     "initializer_range": 0.02,
-    "intermediate_size": 32,
-    "max_position_embeddings": 2048,
-    "mlp_bias": false,
-    "model_type": "llama",
+    "intermediate_size": 14336,
+    "max_position_embeddings": 32768,
+    "model_type": "mistral",
     "num_attention_heads": 4,
     "num_hidden_layers": 2,
     "num_key_value_heads": 2,
-    "pretraining_tp": 1,
-    "rms_norm_eps": 1e-06,
-    "rope_scaling": null,
-    "rope_theta": 10000.0,
+    "rms_norm_eps": 1e-05,
+    "rope_theta": 1000000.0,
+    "sliding_window": null,
+    "torch_dtype": "bfloat16",
     "use_cache": true,
-    "vocab_size": 32005
+    "vocab_size": 32064
   },
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.56.0.dev0",
+  "use_image_newline_parameter": true,
   "vision_config": {
     "attention_dropout": 0.0,
     "hidden_act": "quick_gelu",
@@ -60,15 +64,18 @@
     "image_size": 336,
     "initializer_factor": 1.0,
     "initializer_range": 0.02,
-    "intermediate_size": 32,
+    "intermediate_size": 4096,
     "layer_norm_eps": 1e-05,
     "model_type": "clip_vision_model",
     "num_attention_heads": 4,
     "num_channels": 3,
     "num_hidden_layers": 2,
+    "num_key_value_heads": 2,
     "patch_size": 14,
-    "projection_dim": 8
+    "projection_dim": 768,
+    "vocab_size": 32000
   },
   "vision_feature_layer": -2,
-  "vision_feature_select_strategy": "default"
+  "vision_feature_select_strategy": "default",
+  "vocab_size": 32064
 }
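
This commit swaps the placeholder text backbone in text_config from a llama-style config to a mistral-style one, adds ignore_index and use_image_newline_parameter, and raises the top-level vocab_size to 32064. A minimal sketch of inspecting the updated config with transformers; the repository id below is a placeholder (only the revision 46a5905 comes from this commit):

    from transformers import AutoConfig

    # Placeholder repo id -- substitute the repository this commit belongs to.
    config = AutoConfig.from_pretrained(
        "qgallouedec/tiny-LlavaNextForConditionalGeneration",
        revision="46a5905",
    )
    print(config.text_config.model_type)        # expected: "mistral" after this commit
    print(config.text_config.vocab_size)        # expected: 32064
    print(config.vision_config.projection_dim)  # expected: 768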
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:808b7578d4e2e120e46b755311c7c811c3c31309bb06471cdeded5487fa8bc82
-size 4218192
+oid sha256:d4f465eb7fce9edb50690acacc93afe884226f04363f36d64e9d7dd101212d15
+size 5399728
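
The model.safetensors change only updates the Git LFS pointer: a new SHA-256 object id and a larger payload (5,399,728 bytes, up from 4,218,192). A small sketch, assuming the weights file has already been downloaded locally, that verifies a copy against the pointer values above:

    import hashlib
    import os

    path = "model.safetensors"  # assumed local copy of the uploaded weights

    # Hash the file in chunks to avoid loading it all at once.
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha256.update(chunk)

    # Values taken from the LFS pointer committed here.
    assert sha256.hexdigest() == "d4f465eb7fce9edb50690acacc93afe884226f04363f36d64e9d7dd101212d15"
    assert os.path.getsize(path) == 5399728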