File size: 389 Bytes · commit 65b2409
{
"architectures": [
"ProjectorModel"
],
"auto_map": {
"AutoConfig": "configuration_projector.ProjectorConfig",
"AutoModel": "modeling_projector.ProjectorModel"
},
"bias": true,
"depth": 2,
"hidden_act": "gelu",
"llm_hidden_size": 2048,
"model_type": "projector",
"torch_dtype": "float16",
"transformers_version": "4.40.0",
"visual_hidden_size": 1152
}