{ "alpha_pattern": {}, "auto_mapping": null, "base_model_name_or_path": "mistralai/Mistral-7B-v0.1", "bias": "none", "fan_in_fan_out": false, "inference_mode": true, "init_lora_weights": true, "layers_pattern": null, "layers_to_transform": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ], "lora_alpha": 8, "lora_dropout": 0.01, "modules_to_save": null, "peft_type": "LORA", "r": 2, "rank_pattern": {}, "revision": null, "target_modules": [ "k_proj", "q_proj" ], "task_type": "CAUSAL_LM" }