{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "unsloth/Pixtral-12B-2409-bnb-4bit",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "(?:.*?(?:vision|image|visual|patch|language|text).*?(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense).*?(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj).*?)|(?:\\bmodel\\.layers\\.[\\d]{1,}\\.(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense)\\.(?:(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj)))",
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
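
For context, a minimal sketch of the `peft.LoraConfig` this `adapter_config.json` corresponds to. Only the hyperparameter values and the regex are taken from the JSON above; the surrounding Python (variable names, comments) is illustrative, not the code that produced this file.

```python
# A minimal sketch, assuming the Hugging Face `peft` library.
from peft import LoraConfig, TaskType

# The "target_modules" regex from the config, split for readability.
# Its first alternative matches attention/MLP projection layers anywhere
# under vision/language submodules; the second matches the numbered
# decoder layers under model.layers.<n>.
TARGET_MODULES = (
    r"(?:.*?(?:vision|image|visual|patch|language|text).*?"
    r"(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense).*?"
    r"(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj).*?)"
    r"|(?:\bmodel\.layers\.[\d]{1,}\."
    r"(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense)\."
    r"(?:(?:gate_proj|up_proj|down_proj|k_proj|v_proj|q_proj|o_proj)))"
)

config = LoraConfig(
    r=16,                           # "r": LoRA rank
    lora_alpha=16,                  # "lora_alpha": scaling numerator
    lora_dropout=0.0,               # "lora_dropout": 0
    bias="none",                    # "bias"
    target_modules=TARGET_MODULES,  # peft accepts a regex string here
    task_type=TaskType.CAUSAL_LM,   # "task_type"
    use_rslora=False,               # "use_rslora"
    use_dora=False,                 # "use_dora"
)
```

Note that with `r` = 16 and `lora_alpha` = 16, the effective LoRA scaling factor `lora_alpha / r` is 1. A saved adapter directory containing this file can be read back with `peft.PeftConfig.from_pretrained(<adapter_dir>)`, which is why `inference_mode` is serialized as `true`.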