minpeter committed
Commit 0e089b5 · verified · Parent: fe0398e

Upload LlamaForCausalLM

Files changed (2):
  1. config.json (+1 -1)
  2. model.safetensors (+2 -2)
config.json CHANGED
@@ -15,7 +15,7 @@
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 12,
-  "num_hidden_layers": 30,
+  "num_hidden_layers": 29,
   "num_key_value_heads": 4,
   "pad_token_id": 0,
   "pretraining_tp": 1,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c8d92b997f46d569474fff698d6ca46ff1d2b184fe16e8660b6c19e42490f9b2
-size 818107040
+oid sha256:d8162acf107681d2377accf2437cfab50a483674aa58a88d006906878b6d9e7a
+size 794113712
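
The model.safetensors entry is a Git LFS pointer, so only the oid (the sha256 of the weights file) and size change; the file shrank from 818,107,040 to 794,113,712 bytes along with the removed layer. A rough sketch for checking a downloaded copy against the new pointer, using only values taken from the diff above:

# Verify a local model.safetensors against the LFS pointer in this commit.
import hashlib
import os

path = "model.safetensors"  # local copy of the uploaded weights
expected_oid = "d8162acf107681d2377accf2437cfab50a483674aa58a88d006906878b6d9e7a"
expected_size = 794113712

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the pointer"
assert sha256.hexdigest() == expected_oid, "sha256 does not match the pointer"
print("model.safetensors matches the LFS pointer")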