{
  "_name_or_path": "mistralai/Mistral-7B-v0.1",
  "architectures": [
    "SparseMistralforCausalLM"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "sparsification_sftt.SparseMistralConfig",
    "AutoModelForCausalLM": "sparsification_sftt.SparseMistralforCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 32768,
  "model_type": "sparse_mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "thresholds": [
    0.0631895586848259,
    0.07923770695924759,
    0.089267797768116,
    0.10732196271419525,
    0.12738214433193207,
    0.1414242684841156,
    0.15546639263629913,
    0.16349045932292938,
    0.1675025075674057,
    0.1675025075674057,
    0.1675025075674057,
    0.1735205501317978,
    0.17552657425403595,
    0.1775325983762741,
    0.18956869840621948,
    0.1935807317495346,
    0.19759276509284973,
    0.21364091336727142,
    0.22367100417613983,
    0.23169508576393127,
    0.22367100417613983,
    0.22968906164169312,
    0.22367100417613983,
    0.22367100417613983,
    0.23169508576393127,
    0.23971915245056152,
    0.2457372099161148,
    0.2577733099460602,
    0.2678034007549286,
    0.27382147312164307,
    0.27582746744155884,
    0.277833491563797
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2",
  "us_sparse_regularization": true,
  "use_cache": true,
  "use_graceful_regularization": true,
  "use_relu": false,
  "use_sparse_model": true,
  "use_sparse_predictor": false,
  "use_sparse_regularization": false,
  "vocab_size": 32000
}
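
Because "auto_map" points at custom classes in sparsification_sftt.py, this checkpoint cannot be loaded through the stock Mistral classes; transformers must be allowed to import the bundled module. A minimal loading sketch in Python follows, assuming the config ships alongside sparsification_sftt.py in a checkpoint directory (the path below is a placeholder, not a real repo id):

import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder: a local directory (or Hub repo id) containing this
# config.json together with sparsification_sftt.py.
checkpoint = "path/to/sparse-mistral-checkpoint"

# trust_remote_code=True lets transformers resolve the custom
# SparseMistralConfig / SparseMistralforCausalLM classes via "auto_map".
config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)

model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    config=config,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches the "torch_dtype" entry above
)

Note that "thresholds" holds one activation-sparsification threshold per layer, matching "num_hidden_layers": 32.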