cehenderson committed
Commit a0d326b
1 Parent(s): dbbcc20

Update config.json

Files changed (1):
  1. config.json +22 -1
config.json CHANGED
@@ -1 +1,22 @@
- {"auto_mapping": null, "base_model_name_or_path": "TinyPixel/Llama-2-7B-bf16-sharded", "bias": "none", "fan_in_fan_out": false, "inference_mode": true, "init_lora_weights": true, "layers_pattern": null, "layers_to_transform": null, "lora_alpha": 16, "lora_dropout": 0.1, "modules_to_save": null, "peft_type": "LORA", "r": 64, "revision": null, "target_modules": ["q_proj", "v_proj"], "task_type": "CAUSAL_LM"}
+ {
+   "auto_mapping": null,
+   "base_model_name_or_path": "TinyPixel/Llama-2-7B-bf16-sharded",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "lora_alpha": 16,
+   "lora_dropout": 0.1,
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 64,
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "v_proj"
+   ],
+   "task_type": "CAUSAL_LM"
+ }
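
For context, the file above is a standard PEFT LoRA adapter configuration (rank 64, alpha 16, adapters on the attention query/value projections). Below is a minimal sketch of how an equivalent config could be built and attached to the base model, assuming the `peft` and `transformers` libraries; the output directory name is illustrative, not part of this commit.

```python
# Minimal sketch: recreate the LoRA config from this commit with peft and
# attach it to the base model. Assumes `peft` and `transformers` are installed.
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForCausalLM

lora_config = LoraConfig(
    r=64,                                 # LoRA rank, as in the config above
    lora_alpha=16,                        # scaling factor
    lora_dropout=0.1,
    bias="none",
    target_modules=["q_proj", "v_proj"],  # attention query/value projections
    task_type=TaskType.CAUSAL_LM,
)

# Wrap the base model so only the LoRA matrices are trainable.
base_model = AutoModelForCausalLM.from_pretrained(
    "TinyPixel/Llama-2-7B-bf16-sharded"
)
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()

# Saving the adapter writes a JSON config equivalent to the one in this
# commit (peft normally names the file adapter_config.json).
model.save_pretrained("llama2-lora-adapter")  # hypothetical output directory
```

With only `q_proj` and `v_proj` targeted at r=64, the trainable parameter count stays a small fraction of the 7B base model, which is the usual motivation for this kind of adapter config.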