MiniMind2-MoE / config.json
{
"architectures": [
"MiniMindLM"
],
"auto_map": {
"AutoConfig": "LMConfig.LMConfig",
"AutoModelForCausalLM": "model.MiniMindLM"
},
"aux_loss_alpha": 0.1,
"dim": 640,
"dropout": 0.0,
"flash_attn": true,
"hidden_dim": 1728,
"max_seq_len": 8192,
"model_type": "minimind",
"multiple_of": 64,
"n_heads": 8,
"n_kv_heads": 2,
"n_layers": 8,
"n_routed_experts": 4,
"n_shared_experts": true,
"norm_eps": 1e-05,
"norm_topk_prob": true,
"num_experts_per_tok": 2,
"rope_theta": 1000000.0,
"scoring_func": "softmax",
"seq_aux": true,
"torch_dtype": "float32",
"transformers_version": "4.44.0",
"use_moe": true,
"vocab_size": 6400
}
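
Because auto_map routes AutoConfig and AutoModelForCausalLM to the custom LMConfig.LMConfig and model.MiniMindLM classes shipped alongside this file, loading the checkpoint requires trust_remote_code=True. Below is a minimal loading sketch in Python; the repo id jingyaogong/MiniMind2-MoE, the presence of tokenizer files in the repo, and generate() support in the custom model class are assumptions, not confirmed by the config itself:

# Minimal loading sketch. The repo id is assumed from the page title;
# adjust it if the checkpoint lives elsewhere.
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "jingyaogong/MiniMind2-MoE"  # assumed repo id

# auto_map points AutoConfig / AutoModelForCausalLM at LMConfig.LMConfig and
# model.MiniMindLM, so trust_remote_code=True is required.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.use_moe, config.n_routed_experts, config.num_experts_per_tok)

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id)  # 6400-token vocab per this config

# Simple generation check (assumes the custom model implements generate()).
inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))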