{
"add_projections": false,
"architectures": [
"JinaCLIPModel"
],
"auto_map": {
"AutoConfig": "jinaai/jina-clip-implementation--configuration_clip.JinaCLIPConfig",
"AutoModel": "jinaai/jina-clip-implementation--modeling_clip.JinaCLIPModel"
},
"initializer_factor": 1.0,
"logit_scale_init_value": 2.6592,
"matryoshka_dimensions": [32, 64, 128, 256, 512, 768, 1024],
"model_type": "jina_clip",
"projection_dim": 1024,
"text_config": {
"default_instruction_task": null,
"default_lora_task": "retrieval.query",
"embed_dim": 1024,
"hf_model_config_kwargs": {
"load_trained_adapters": false,
"lora_adaptations": [
"retrieval.query"
],
"lora_alpha": 4,
"lora_dropout_p": 0.0,
"lora_main_params_trainable": false,
"lora_rank": 4,
"task_instructions": {
"retrieval.query": "Represent the query for retrieving evidence documents: "
},
"use_flash_attn": false
},
"hf_model_name_or_path": "jinaai/jina-embeddings-v3",
"model_type": "jina_clip_text",
"pooler_type": "mean_pooler",
"proj_bias": false,
"proj_type": null
},
"truncate_dim": null,
"use_text_flash_attn": null,
"use_vision_xformers": null,
"vision_config": {
"embed_dim": 1024,
"fused_layer_norm": false,
"head_width": 64,
"image_size": 384,
"intp_freq": true,
"layers": 24,
"ls_init_value": null,
"mlp_ratio": 2.6667,
"model_type": "jina_clip_vision",
"naive_swiglu": true,
"patch_dropout": 0.1,
"patch_size": 14,
"post_norm": false,
"proj_type": null,
"pt_hw_seq_len": 16,
"qkv_bias": true,
"rope_embeddings": true,
"subln": true,
"width": 1024,
"x_attention": false
}
} |
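
A minimal sketch of how a config like this is typically consumed with the Hugging Face transformers library. The directory path is a hypothetical placeholder for wherever this config.json lives, and trust_remote_code=True is needed because the auto_map entries above point at classes in jinaai/jina-clip-implementation rather than at built-in transformers classes.

# Sketch only: "path/to/jina-clip-model" is a placeholder for a local directory
# (or hub repo id) containing this config.json; transformers must be installed.
from transformers import AutoConfig, AutoModel

# trust_remote_code=True lets transformers resolve the auto_map above and load
# JinaCLIPConfig / JinaCLIPModel from the jina-clip-implementation code repo.
config = AutoConfig.from_pretrained("path/to/jina-clip-model", trust_remote_code=True)
print(config.model_type)             # jina_clip (mirrored from the JSON above)
print(config.matryoshka_dimensions)  # [32, 64, 128, 256, 512, 768, 1024]

model = AutoModel.from_pretrained("path/to/jina-clip-model", trust_remote_code=True)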