{
  "_attn_implementation_autoset": true,
  "ae_mode": "token",
  "attn_implementation": null,
  "auto_map": {
    "AutoConfig": "modelling_pisco.COCOMConfig",
    "AutoModel": "modelling_pisco.COCOM"
  },
  "compr_base_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
  "compr_every_n_layer": null,
  "compr_linear_type": "concat",
  "compr_mlp_hidden_dim": 8096,
  "compr_model_name": null,
  "compr_n_layers": null,
  "compr_rate": 16,
  "compr_rms_norm": false,
  "compr_use_mlp": true,
  "decoder_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
  "device_map": null,
  "different_mem_tokens": true,
  "doc_max_length": 128,
  "generation_top_k": 1,
  "kbtc_training": false,
  "load_adapters": true,
  "lora": true,
  "lora_compressor": false,
  "lora_r": 16,
  "lora_r_compressor": 16,
  "max_new_tokens": 128,
  "model_type": "COCOM",
  "optimize_mem_tokens": true,
  "quantization": "no",
  "sep": true,
  "training_form": "both_separately",
  "transformers_version": "4.48.0"
}
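
For reference, a minimal sketch of loading a checkpoint that ships this config. The auto_map entries point to the custom COCOMConfig and COCOM classes in the modelling_pisco module bundled with the repository, so trust_remote_code=True is required; the repo id used here is an assumption for illustration, substitute the actual checkpoint path.

from transformers import AutoModel

# Assumed repo id for illustration; replace with the real checkpoint path.
# trust_remote_code=True lets transformers resolve the auto_map entries above
# and instantiate modelling_pisco.COCOM instead of a stock architecture.
model = AutoModel.from_pretrained(
    "naver/pisco-mistral",
    trust_remote_code=True,
)

# Config fields are exposed as attributes on the loaded custom config:
print(model.config.compr_rate)      # 16
print(model.config.doc_max_length)  # 128

With compr_rate 16 and doc_max_length 128, each retrieved document of up to 128 tokens is presumably compressed into about 128 / 16 = 8 memory embeddings before the mistralai/Mistral-7B-Instruct-v0.2 decoder consumes them.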