{ "_name_or_path": "./checkpoint-800", "alpha_pattern": {}, "architectures": [ "MambaForCausalLM" ], "auto_mapping": null, "base_model_name_or_path": "state-spaces/mamba-370m-hf", "bias": "none", "bos_token_id": 0, "conv_kernel": 4, "eos_token_id": 0, "expand": 2, "fan_in_fan_out": false, "hidden_act": "silu", "hidden_size": 768, "inference_mode": true, "init_lora_weights": true, "initializer_range": 0.1, "intermediate_size": 1536, "layer_norm_epsilon": 1e-05, "layer_replication": null, "layers_pattern": null, "layers_to_transform": null, "loftq_config": {}, "lora_alpha": 8, "lora_dropout": 0.0, "megatron_config": null, "megatron_core": "megatron.core", "model_type": "mamba", "modules_to_save": null, "num_hidden_layers": 32, "pad_token_id": 0, "peft_type": "LORA", "r": 8, "rank_pattern": {}, "rescale_prenorm_residual": false, "residual_in_fp32": true, "revision": null, "state_size": 16, "target_modules": [ "embeddings", "x_proj", "out_proj", "in_proj" ], "task_type": "CAUSAL_LM", "time_step_floor": 0.0001, "time_step_init_scheme": "random", "time_step_max": 0.1, "time_step_min": 0.001, "time_step_rank": 48, "time_step_scale": 1.0, "torch_dtype": "float32", "transformers_version": "4.40.1", "use_bias": false, "use_cache": true, "use_conv_bias": true, "use_dora": false, "use_rslora": false, "vocab_size": 50280 }