mamba-130m / config.json
{
  "architectures": [
    "MambaModelForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_mamba.MambaConfig",
    "AutoModelForCausalLM": "modeling_mamba.MambaModelForCausalLM"
  },
  "bias": false,
  "conv_bias": true,
  "d_conv": 4,
  "d_inner": 1536,
  "d_model": 768,
  "d_state": 16,
  "dt_rank": 48,
  "expand": 2,
  "initializer_range": 0.02,
  "model_type": "mamba",
  "n_layer": 24,
  "pad_vocab_size_multiple": 8,
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "vocab_size": 50280
}
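
Because the "auto_map" entries point at custom code files (configuration_mamba.py and modeling_mamba.py) rather than classes built into transformers, loading this checkpoint requires trust_remote_code=True. The sketch below is a minimal illustration, not documented usage: it assumes the repository id is mjschock/mamba-130m, that the custom modeling files live in the same repository, that a tokenizer is bundled with it, and that the custom forward follows the usual CausalLM output convention.

# Minimal loading sketch. Assumptions: repo id "mjschock/mamba-130m", the
# files named in "auto_map" are present in the repo, and a tokenizer is
# bundled alongside the weights.
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "mjschock/mamba-130m"  # assumed repository id

# "auto_map" routes AutoConfig to configuration_mamba.MambaConfig and
# AutoModelForCausalLM to modeling_mamba.MambaModelForCausalLM, so the
# custom code in the repo must be trusted and executed.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# Sanity check against the values stored in this config:
#   d_inner = expand * d_model -> 2 * 768 = 1536 (dt_rank 48 is d_model / 16)
assert config.d_inner == config.expand * config.d_model

# Plain forward pass, assuming the custom model returns an output object
# with .logits as standard CausalLM classes do.
with torch.no_grad():
    input_ids = tokenizer("Mamba is a state space model", return_tensors="pt").input_ids
    logits = model(input_ids).logits
print(logits.shape)  # expected: (1, seq_len, 50280)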