Schmadge committed
Commit 86bdaf1 · Parent: 8e9508e

Upload MPTForCausalLM

Files changed (2):
  1. config.json +3 -3
  2. pytorch_model-00002-of-00002.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "Schmadge/mpt-7b-action-importance-ft",
+  "_name_or_path": "mosaicml/mpt-7b-instruct",
   "architectures": [
     "MPTForCausalLM"
   ],
@@ -16,8 +16,8 @@
     "softmax_scale": null
   },
   "auto_map": {
-    "AutoConfig": "configuration_mpt.MPTConfig",
-    "AutoModelForCausalLM": "modeling_mpt.MPTForCausalLM"
+    "AutoConfig": "mosaicml/mpt-7b-instruct--configuration_mpt.MPTConfig",
+    "AutoModelForCausalLM": "mosaicml/mpt-7b-instruct--modeling_mpt.MPTForCausalLM"
   },
   "d_model": 4096,
   "emb_pdrop": 0,
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f07a409d34316286ecb473720a300186612dcaf54a895fabeedf0da6970ff00a
+oid sha256:633faafe53862d5d0260c8f921a5806da7423655b1371c1299ae848e616b52d2
 size 3355599827
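The shard is tracked through Git LFS, so the commit only rewrites the pointer file: the sha256 oid changes while the size (3355599827 bytes) stays identical, meaning the weights were replaced by a same-sized shard. A minimal sketch for checking a downloaded shard against the new pointer oid (the local path is an assumption mirroring the repo path):

    import hashlib

    # New oid from this commit's LFS pointer.
    EXPECTED = "633faafe53862d5d0260c8f921a5806da7423655b1371c1299ae848e616b52d2"

    h = hashlib.sha256()
    with open("pytorch_model-00002-of-00002.bin", "rb") as f:  # assumed local path
        for chunk in iter(lambda: f.read(1 << 20), b""):       # hash in 1 MiB chunks
            h.update(chunk)

    assert h.hexdigest() == EXPECTED, "shard does not match the LFS pointer oid"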