wzebrowski committed on
Commit
c3c47d9
·
verified ·
1 Parent(s): 6af650e

Upload adapter_config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. adapter_config.json +26 -0
adapter_config.json ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ {
+ "base_model_name_or_path": "wzebrowski/mlx_slm",
+ "peft_type": "LORA",
+ "r": 16,
+ "lora_alpha": 32,
+ "target_modules": [
+ "encoder.layers.10",
+ "encoder.layers.11",
+ "encoder.layers.12",
+ "encoder.layers.13",
+ "encoder.layers.14",
+ "encoder.layers.15",
+ "encoder.layers.16",
+ "encoder.layers.17",
+ "encoder.layers.2",
+ "encoder.layers.3",
+ "encoder.layers.4",
+ "encoder.layers.5",
+ "encoder.layers.6",
+ "encoder.layers.7",
+ "encoder.layers.8",
+ "encoder.layers.9"
+ ],
+ "bias": "none",
+ "task_type": "CAUSAL_LM"
+ }