{
  "_name_or_path": "kawn_cohere_8b_idefics3_siglib14_384",
  "architectures": [
    "KawnIdefics3ForConditionalGeneration"
  ],
  "ignore_index": -100,
  "image_grid_pinpoints": [
    [256, 256],
    [256, 512],
    [256, 768],
    [256, 1024],
    [256, 1280],
    [1280, 256],
    [1024, 256],
    [768, 256],
    [512, 256],
    [512, 512],
    [512, 768],
    [512, 1024],
    [512, 1280],
    [1280, 512],
    [1024, 512],
    [768, 512],
    [768, 1024],
    [1024, 768],
    [1280, 768],
    [768, 1280],
    [768, 768],
    [1024, 1024],
    [1024, 1280],
    [1280, 1024],
    [1280, 1280]
  ],
  "image_seq_length": 64,
  "image_token_id": 256001,
  "model_type": "kawn_idefics3",
  "scale_factor": 2,
  "text_config": {
    "_name_or_path": "google/gemma-2-2b-it",
    "architectures": [
      "Gemma2ForCausalLM"
    ],
    "attn_logit_softcapping": 50.0,
    "bos_token_id": 2,
    "cache_implementation": "hybrid",
    "eos_token_id": [
      1,
      107
    ],
    "final_logit_softcapping": 30.0,
    "head_dim": 256,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_activation": "gelu_pytorch_tanh",
    "hidden_size": 2304,
    "intermediate_size": 9216,
    "model_type": "gemma2",
    "num_attention_heads": 8,
    "num_hidden_layers": 26,
    "num_key_value_heads": 4,
    "query_pre_attn_scalar": 256,
    "rms_norm_eps": 1e-06,
    "sliding_window": 4096,
    "torch_dtype": "bfloat16"
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "vision_config": {
    "_attn_implementation_autoset": true,
    "hidden_size": 768,
    "image_size": 256,
    "intermediate_size": 3072,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "patch_size": 16,
    "vision_use_head": false
  },
  "vision_feature_layer": null
}