f8a8-opt-125m / config.json
{
  "_name_or_path": "facebook/opt-125m",
  "_remove_final_layer_norm": false,
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "OPTForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 2,
  "do_layer_norm_before": true,
  "dropout": 0.1,
  "enable_bias": true,
  "eos_token_id": 2,
  "ffn_dim": 3072,
  "hidden_size": 768,
  "init_std": 0.02,
  "layer_norm_elementwise_affine": true,
  "layerdrop": 0.0,
  "max_position_embeddings": 2048,
  "model_type": "opt",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "prefix": "</s>",
  "quantization_config": {
    "modules_to_not_convert": null,
    "quant_method": "torchao",
    "quant_type": {
      "class_name": "Float8DynamicActivationFloat8WeightConfig",
      "config": {
        "activation_dtype": "torch.float8_e4m3fn",
        "granularity": {
          "type": "PerRow"
        },
        "mm_config": [
          false,
          true,
          false
        ],
        "weight_dtype": "torch.float8_e4m3fn"
      }
    },
    "quant_type_kwargs": {}
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.50.0.dev0",
  "use_cache": true,
  "vocab_size": 50272,
  "word_embed_proj_dim": 86
}
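
The quantization_config block records how the checkpoint was quantized: torchao's Float8DynamicActivationFloat8WeightConfig with both activations and weights in torch.float8_e4m3fn and per-row scaling. The serialized mm_config [false, true, false] likely corresponds to the fields of torchao's Float8MMConfig (emulate=false, use_fast_accum=true, pad_inner_dim=false). Below is a minimal sketch of how an equivalent checkpoint could be produced; the exact torchao version and the push target are assumptions not recorded in this file (it only notes transformers 4.50.0.dev0 and quant_method "torchao"):

    # Sketch only: assumes a torchao release with Float8DynamicActivationFloat8WeightConfig
    # and a transformers build (>= 4.50) whose TorchAoConfig accepts torchao config objects.
    import torch
    from torchao.quantization import Float8DynamicActivationFloat8WeightConfig, PerRow
    from transformers import AutoModelForCausalLM, TorchAoConfig

    quant_config = TorchAoConfig(
        quant_type=Float8DynamicActivationFloat8WeightConfig(granularity=PerRow())
    )
    model = AutoModelForCausalLM.from_pretrained(
        "facebook/opt-125m",         # base checkpoint named in "_name_or_path"
        torch_dtype=torch.bfloat16,  # matches "torch_dtype" above
        device_map="cuda",
        quantization_config=quant_config,
    )
    # torchao tensor subclasses cannot be stored as safetensors, so the
    # checkpoint is pushed with pickle-based serialization. The repo name
    # is hypothetical, taken from this page's header.
    model.push_to_hub("f8a8-opt-125m", safe_serialization=False)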
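
Loading the quantized checkpoint back is a plain from_pretrained call; the quantization_config above tells transformers to deserialize the weights into torchao tensor subclasses. A usage sketch, where the repo id "drisspg/f8a8-opt-125m" is inferred from the page header and uploader and may not be the actual Hub location:

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    # Hypothetical repo id; substitute the real Hub path of this checkpoint.
    repo_id = "drisspg/f8a8-opt-125m"
    model = AutoModelForCausalLM.from_pretrained(
        repo_id,
        torch_dtype=torch.bfloat16,
        device_map="cuda",
    )
    tokenizer = AutoTokenizer.from_pretrained(repo_id)

    inputs = tokenizer("Hello, my dog is cute", return_tensors="pt").to(model.device)
    output = model.generate(**inputs, max_new_tokens=32)
    print(tokenizer.decode(output[0], skip_special_tokens=True))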