Text Generation
Transformers
English
codegen
mhhmm committed
Commit 1629be6 · 1 Parent(s): 5e9bf4a

Update config.json

Files changed (1)
  1. config.json +1 -7
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "base_model_name_or_path": "Salesforce/codegen-6B-mono",
+  "peft_model_id": "mhhmm/codegen-6B-LoRA",
   "_name_or_path": "codegen-6B-mono",
-  "activation_function": "gelu_new",
   "architectures": [
     "CodeGenForCausalLM"
   ],
@@ -13,12 +13,6 @@
   "eos_token_id": 0,
   "model_type": "codegen",
   "pad_token_id": 1023,
-  "resid_pdrop": 0.0,
-  "rotary_dim": 4,
-  "scale_attn_weights": true,
-  "tie_word_embeddings": false,
-  "torch_dtype": "float32",
-  "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,