{
  "architectures": [
    "TinyLlavaForConditionalGeneration"
  ],
  "cache_dir": null,
  "connector_type": "mlp2x_gelu",
  "hidden_size": 896,
  "ignore_index": -100,
  "image_aspect_ratio": "square",
  "image_token_index": -200,
  "llm_model_name_or_path": "Qwen/Qwen2-0.5B-Instruct",
  "model_type": "tinyllava",
  "num_queries": 128,
  "num_resampler_layers": 3,
  "pad_token": "<|endoftext|>",
  "pad_token_id": 151643,
  "resampler_hidden_size": 768,
  "text_config": {
    "_name_or_path": "Qwen/Qwen2-0.5B-Instruct",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "bos_token_id": 151643,
    "eos_token_id": 151645,
    "hidden_size": 896,
    "intermediate_size": 4864,
    "max_position_embeddings": 32768,
    "max_window_layers": 24,
    "model_type": "qwen2",
    "num_attention_heads": 14,
    "num_hidden_layers": 24,
    "num_key_value_heads": 2,
    "rope_theta": 1000000.0,
    "sliding_window": 32768,
    "tie_word_embeddings": true,
    "torch_dtype": "float16",
    "use_sliding_window": false,
    "vocab_size": 151936
  },
  "tokenizer_model_max_length": 2048,
  "tokenizer_name_or_path": "Qwen/Qwen2-0.5B-Instruct",
  "tokenizer_padding_side": "right",
  "tokenizer_use_fast": false,
  "torch_dtype": "float16",
  "transformers_version": "4.39.3",
  "tune_type_connector": "full",
  "tune_type_llm": "frozen",
  "tune_type_vision_tower": "frozen",
  "tune_vision_tower_from_layer": 0,
  "use_cache": false,
  "vision_config": {
    "_name_or_path": "facebook/dinov2-small",
    "apply_layernorm": true,
    "architectures": [
      "Dinov2Model"
    ],
    "attention_probs_dropout_prob": 0.0,
    "drop_path_rate": 0.0,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.0,
    "hidden_size": 384,
    "image_size": 518,
    "layer_norm_eps": 1e-06,
    "layerscale_value": 1.0,
    "mlp_ratio": 4,
    "model_name_or_path": "facebook/dinov2-small",
    "model_name_or_path2": "",
    "model_type": "dinov2",
    "num_attention_heads": 6,
    "num_hidden_layers": 12,
    "out_features": [
      "stage12"
    ],
    "out_indices": [
      12
    ],
    "patch_size": 14,
    "qkv_bias": true,
    "reshape_hidden_states": true,
    "stage_names": [
      "stem",
      "stage1",
      "stage2",
      "stage3",
      "stage4",
      "stage5",
      "stage6",
      "stage7",
      "stage8",
      "stage9",
      "stage10",
      "stage11",
      "stage12"
    ],
    "torch_dtype": "float32",
    "use_swiglu_ffn": false
  },
  "vision_feature_layer": -2,
  "vision_feature_select_strategy": "patch",
  "vision_hidden_size": 384,
  "vision_model_name_or_path": "facebook/dinov2-small",
  "vision_model_name_or_path2": "",
  "vocab_size": 151936
}
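
For reference, a minimal loading sketch. It assumes the repository ships the custom TinyLLaVA modeling code and registers it via `auto_map`, as the reference TinyLLaVA Factory checkpoints do (the "tinyllava" `model_type` is not part of stock transformers, so `trust_remote_code=True` is required); the repo id below is a placeholder, not confirmed by this file.

```python
# Minimal sketch, assuming this repo exposes the TinyLLaVA modeling code
# through auto_map so transformers can resolve model_type "tinyllava".
# "sbrzz/<repo-name>" is a hypothetical placeholder for the model id.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "sbrzz/<repo-name>"

# The config above is what from_pretrained parses: a Qwen2-0.5B-Instruct
# language model paired with a DINOv2-small vision tower through an
# "mlp2x_gelu" connector.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.llm_model_name_or_path)     # Qwen/Qwen2-0.5B-Instruct
print(config.vision_model_name_or_path)  # facebook/dinov2-small
print(config.connector_type)             # mlp2x_gelu

# torch.float16 matches the checkpoint's top-level "torch_dtype".
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.float16,
)

# use_cache is disabled in the config (a training-time setting);
# re-enable it for faster autoregressive generation.
model.config.use_cache = True
```

Note the tuning flags in the config: only the connector was trained in full (`tune_type_connector: "full"`), while the Qwen2 language model and the DINOv2 vision tower were kept frozen.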