{
  "_name_or_path": "/data/workspace/bat-gpt/checkpoints/batgpt-15b-sirius",
  "alibi": false,
  "architectures": [
    "BatGPTForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_batgpt.BatGPTConfig",
    "AutoModel": "modeling_batgpt.BatGPTForCausalLM",
    "AutoModelForCausalLM": "modeling_batgpt.BatGPTForCausalLM",
    "AutoModelForSeq2SeqLM": "modeling_batgpt.BatGPTForCausalLM"
  },
  "emb_dim": 5632,
  "empty_init": false,
  "eos_token_id": 2,
  "ffn_hidden_size": 13696,
  "hidden_dropout": 0.0,
  "hidden_size": 5632,
  "layer_norm_epsilon": 1e-05,
  "max_seq_len": 32768,
  "mlp_activation": "swiglu",
  "model_type": "batgpt",
  "n_head": 44,
  "n_layer": 48,
  "num_heads_per_kv": 2,
  "pad_token_id": 0,
  "pos_emb_impl": "rope",
  "prefix_proj": false,
  "prefix_size": null,
  "qkv_bias": true,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.30.2",
  "use_cache": true,
  "use_emb_factorization": false,
  "use_multi_query_attn": true,
  "use_native_attn_impl": true,
  "vocab_size": 65536
}
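
The `auto_map` block routes the `Auto*` classes to custom code (`configuration_batgpt.py` / `modeling_batgpt.py`) shipped alongside the checkpoint, so loading through `transformers` requires `trust_remote_code=True`. A minimal loading sketch follows; the repo id `MLP-lab/BatGPT-15B-sirius` is an assumption, so substitute the actual repo id or a local checkpoint directory:

```python
# Minimal loading sketch. The repo id below is an assumption; point it at
# wherever this config.json actually lives (Hub repo or local directory).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "MLP-lab/BatGPT-15B-sirius"  # assumed repo id / local checkpoint path

# trust_remote_code=True is required because "auto_map" in the config routes
# the Auto* classes to the custom configuration_batgpt.py / modeling_batgpt.py.
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.float16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)

inputs = tokenizer("Hello, BatGPT!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```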
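A few dimensions can be sanity-checked directly from the values above: `hidden_size` 5632 split across `n_head` 44 query heads gives 128-dimensional heads, and `use_multi_query_attn` with `num_heads_per_kv` 2 indicates grouped key/value heads. How `num_heads_per_kv` is interpreted is defined by the custom `modeling_batgpt.py`; the sketch below assumes it means the number of query heads sharing each key/value head:

```python
# Derived attention geometry from the config values above. The reading of
# num_heads_per_kv is an assumption; modeling_batgpt.py is authoritative.
hidden_size = 5632
n_head = 44
num_heads_per_kv = 2

head_dim = hidden_size // n_head
assert head_dim * n_head == hidden_size  # splits evenly: 44 * 128 == 5632
print(f"head_dim = {head_dim}")  # 128

# If num_heads_per_kv counts query heads sharing one KV head (assumed),
# each layer carries n_head / num_heads_per_kv key/value heads.
kv_heads = n_head // num_heads_per_kv
print(f"kv_heads = {kv_heads}")  # 22
```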