molmo-hf-7B-D / config.json
{
  "_name_or_path": "/raid/pablo/Molmo-7B-D-hf-modular",
  "architectures": [
    "MolmoForConditionalGeneration"
  ],
  "image_token_index": 152069,
  "initializer_range": 0.02,
  "model_type": "molmo",
  "pooling_config": {
    "model_type": ""
  },
  "text_config": {
    "attention_bias": true,
    "model_type": "molmo_text",
    "use_attention_layer_norm": false,
    "use_postnorm": false
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.48.0.dev0",
  "vision_config": {
    "model_type": "molmo_vision_model"
  },
  "vision_feature_layers": [
    -2,
    -9
  ],
  "vision_feature_select_strategy": "default"
}
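
For reference, a minimal sketch of how this config might be consumed with the transformers library. It assumes a transformers build that includes Molmo support (the file lists 4.48.0.dev0) and that the repo id is "Molbap/molmo-hf-7B-D", which is inferred from the page path rather than stated in the file:

```python
# Minimal sketch: load and inspect this config with transformers.
# Assumes a transformers version with Molmo support registered and that the
# repo id "Molbap/molmo-hf-7B-D" is correct (inferred, not stated in the file).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Molbap/molmo-hf-7B-D")

# Top-level composite config for MolmoForConditionalGeneration.
print(config.model_type)             # "molmo"
print(config.image_token_index)      # 152069
print(config.vision_feature_layers)  # [-2, -9]

# Nested sub-configs for the text and vision components.
print(config.text_config.model_type)    # "molmo_text"
print(config.vision_config.model_type)  # "molmo_vision_model"
```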