{
  "_name_or_path": "axial_caduceus_1200",
  "_target_": "models.configuration_caduceus.AxialCaduceusConfig",
  "architectures": [
    "AxialCaduceusForMaskedLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_caduceus.AxialCaduceusConfig",
    "AutoModelForMaskedLM": "modeling_caduceus.AxialCaduceusForMaskedLM"
  },
  "bidirectional": true,
  "bidirectional_strategy": "add",
  "bidirectional_weight_tie": true,
  "complement_map": null,
  "d_intermediate": 0,
  "d_model": 128,
  "fused_add_norm": true,
  "initializer_cfg": {
    "initializer_range": 0.02,
    "n_residuals_per_layer": 1,
    "rescale_prenorm_residual": true
  },
  "model_name": "axial_caduceus",
  "model_type": "axial_caduceus",
  "n_layer": 4,
  "norm_epsilon": 1e-05,
  "pad_vocab_size_multiple": 8,
  "pos_embeddings": "None",
  "rcps": false,
  "residual_in_fp32": false,
  "rms_norm": true,
  "row_first": true,
  "ssm_cfg": {
    "bias": false,
    "conv_bias": true,
    "d_conv": 4,
    "d_state": 16,
    "dt_init_floor": 0.0001,
    "dt_max": 0.1,
    "dt_min": 0.001,
    "expand": 4
  },
  "torch_dtype": "float32",
  "transformers_version": "4.45.2",
  "use_mamba2": true,
  "vocab_size": 16
}
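
A minimal loading sketch for a checkpoint that ships this config. The "auto_map" entries point the Auto* classes at custom configuration_caduceus.py / modeling_caduceus.py files bundled with the checkpoint, so trust_remote_code=True is required; the repo path used below is a placeholder, not a real Hub id.

# Minimal loading sketch; "path/to/axial_caduceus_1200" is a placeholder.
import torch
from transformers import AutoConfig, AutoModelForMaskedLM

repo = "path/to/axial_caduceus_1200"  # placeholder: local dir or Hub id

# auto_map routes AutoConfig / AutoModelForMaskedLM to the custom
# modeling code shipped with the checkpoint, hence trust_remote_code=True.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(
    repo,
    trust_remote_code=True,
    torch_dtype=torch.float32,  # matches "torch_dtype" in the config
)
model.eval()

# Keys in config.json become attributes on the loaded config object.
print(config.model_type, config.d_model, config.n_layer, config.vocab_size)
# -> axial_caduceus 128 4 16

Since the head is a masked-LM over a 16-token vocabulary, a forward pass yields per-position logits of width vocab_size. Whether the model expects a flat token sequence or a rows-by-columns grid (cf. "row_first") is defined by the custom modeling code, so check modeling_caduceus.py in the checkpoint before shaping inputs.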