Commit e7c5a44 (verified): End of training
{
"_name_or_path": "meta-llama/Llama-2-7b-hf",
"architectures": [
"SparseLlamaForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "ugly_utils.SparseLlamaConfig",
"AutoModelForCausalLM": "ugly_utils.SparseLlamaForCausalLM"
},
"bos_token_id": 1,
"eos_token_id": 2,
"hidden_act": "silu",
"hidden_size": 4096,
"initializer_range": 0.02,
"intermediate_size": 11008,
"max_position_embeddings": 4096,
"model_type": "sparse_llama",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"pretraining_tp": 1,
"rms_norm_eps": 1e-05,
"rope_scaling": null,
"rope_theta": 10000.0,
"thresholds": [
0.05917752534151077,
0.07723169028759003,
0.09327983111143112,
0.11935807019472122,
0.151454359292984,
0.17953860759735107,
0.1935807317495346,
0.20561684668064117,
0.21163490414619446,
0.21765294671058655,
0.22166499495506287,
0.22166499495506287,
0.225677028298378,
0.23370109498500824,
0.2357071191072464,
0.24172517657279968,
0.2457372099161148,
0.24974924325942993,
0.25376129150390625,
0.2517552673816681,
0.255767285823822,
0.255767285823822,
0.2577733099460602,
0.2577733099460602,
0.2617853581905365,
0.26579737663269043,
0.26980942487716675,
0.2718154489994049,
0.27582746744155884,
0.277833491563797,
0.3039117157459259,
0.4463390111923218
],
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.36.2",
"us_sparse_regularization": false,
"use_cache": false,
"use_graceful_regularization": false,
"use_relu": false,
"use_sparse_model": true,
"use_sparse_predictor": false,
"use_sparse_regularization": false,
"vocab_size": 32000
}
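
The auto_map entries above route AutoConfig and AutoModelForCausalLM to custom SparseLlama classes defined in ugly_utils.py inside the repository, so loading this checkpoint through transformers requires trust_remote_code=True. A minimal loading sketch follows; REPO_ID is a placeholder for this model's Hugging Face repository path (not given here), and the tokenizer is assumed to be the unmodified base meta-llama/Llama-2-7b-hf one named in _name_or_path.

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

REPO_ID = "<user>/<sparse-llama-repo>"  # placeholder, not the actual repo id

# trust_remote_code=True is required because auto_map points at the custom
# SparseLlamaConfig / SparseLlamaForCausalLM classes shipped in ugly_utils.py.
config = AutoConfig.from_pretrained(REPO_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    REPO_ID,
    config=config,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)
# Assumption: the tokenizer is the unmodified base-model tokenizer.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

The thresholds array holds one value per hidden layer (32 entries for num_hidden_layers = 32). The exact semantics are defined by SparseLlamaForCausalLM in ugly_utils.py; one plausible reading, sketched below as an assumption rather than the repository's actual implementation, is magnitude-based activation sparsification, where activations whose absolute value falls below the layer's threshold are zeroed.

def sparsify_activation(x: torch.Tensor, threshold: float) -> torch.Tensor:
    # ASSUMPTION: per-layer magnitude thresholding. The real behavior lives
    # in ugly_utils.SparseLlamaForCausalLM; this only illustrates the idea.
    return torch.where(x.abs() >= threshold, x, torch.zeros_like(x))

# Hypothetical use: apply config.thresholds[i] inside layer i, e.g.
# sparse = sparsify_activation(hidden, config.thresholds[i]).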