btlm-3b-8k-chat / config.json
{
"_name_or_path": "cerebras/btlm-3b-8k-dpo",
"activation_function": "swiglu",
"alibi_scaling": null,
"architectures": [
"BTLMLMHeadModel"
],
"attn_pdrop": 0.0,
"auto_map": {
"AutoConfig": "configuration_btlm.BTLMConfig",
"AutoModel": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMModel",
"AutoModelForCausalLM": "modeling_btlm.BTLMLMHeadModel",
"AutoModelForQuestionAnswering": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMForQuestionAnswering",
"AutoModelForSequenceClassification": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMForSequenceClassification",
"AutoModelForTokenClassification": "cerebras/btlm-3b-8k-base--modeling_btlm.BTLMForTokenClassification"
},
"bos_token_id": 50256,
"embd_pdrop": 0.0,
"eos_token_id": 50256,
"initializer_range": 0.073,
"layer_norm_epsilon": 1e-05,
"model_type": "btlm",
"mup_embeddings_scale": 14.6,
"mup_output_alpha": 2.22,
"mup_scale_qk_dot_by_d": true,
"mup_width_scale": 0.1,
"n_embd": 2560,
"n_head": 32,
"n_inner": 6826,
"n_layer": 32,
"n_positions": 8192,
"position_embedding_type": "alibi",
"reorder_and_upcast_attn": false,
"resid_pdrop": 0.0,
"scale_attn_by_inverse_layer_idx": false,
"scale_attn_weights": true,
"torch_dtype": "bfloat16",
"transformers_version": "4.34.1",
"use_cache": true,
"vocab_size": 50257
}
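A minimal loading sketch (not part of the config file above, and the exact repo id is an assumption since only "btlm-3b-8k-chat" appears in the file path): because the auto_map entries route AutoConfig and AutoModelForCausalLM to the custom configuration_btlm / modeling_btlm classes and the repo is tagged custom_code, loading through transformers requires trust_remote_code=True.

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Placeholder id: replace with the actual hub repo id or a local checkout of
# this folder; the namespace is not shown on this page.
model_id = "btlm-3b-8k-chat"

# auto_map points at custom BTLM classes shipped with the repo, so remote
# code must be trusted explicitly.
config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
print(config.model_type, config.n_positions)  # expected: btlm 8192

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",  # the config declares bfloat16 weights
    trust_remote_code=True,
)

inputs = tokenizer("Hello, my name is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0]))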