# All six models below contribute with equal weight (1) to the TIES merge
models:
  - model: Intel/neural-chat-7b-v3
    parameters:
      weight: 1
  - model: openchat/openchat-3.5-0106
    parameters:
      weight: 1
  - model: BAAI/Infinity-Instruct-7M-Gen-mistral-7B
    parameters:
      weight: 1
  # "+" stacks the named LoRA adapter onto the model before it is merged
  - model: mikewang/PVD-160k-Mistral-7b+Anarchist/mistral_7b_lora_smol_pippa
    parameters:
      weight: 1
  - model: SanjiWatsuki/Kunoichi-DPO-v2-7B+jeiku/Synthetic_Soul_1k_Mistral_128
    parameters:
      weight: 1
  - model: Epiculous/Mika-7B
    parameters:
      weight: 1

merge_method: ties
base_model: kittn/mistral-7B-v0.1-hf   # task vectors are computed relative to this base
parameters:
  density: 1        # keep 100% of each task vector (no trimming/sparsification)
  normalize: true   # rescale the per-model weights so they sum to 1
  int8_mask: true   # store intermediate masks as int8 to reduce memory use
dtype: bfloat16     # perform the merge and save the result in bfloat16
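
This is a mergekit-style merge configuration: six Mistral-7B-family models are combined with the TIES method against the kittn/mistral-7B-v0.1-hf base. A minimal usage sketch follows, assuming mergekit is installed, that this config is saved as ties-merge.yml, and that ./merged-7b is the desired output directory; exact option names may differ between mergekit versions. The equivalent CLI call is roughly: mergekit-yaml ties-merge.yml ./merged-7b --cuda

# Sketch, assuming mergekit's documented Python API (pip install mergekit).
# "ties-merge.yml" and "./merged-7b" are placeholder paths chosen for this example.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML configuration above into mergekit's configuration object
with open("ties-merge.yml", "r", encoding="utf-8") as fp:
    config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the TIES merge and write the bfloat16 result to the output directory
run_merge(
    config,
    out_path="./merged-7b",
    options=MergeOptions(
        cuda=True,             # merge on GPU if one is available
        copy_tokenizer=True,   # copy the base model's tokenizer into the output
        lazy_unpickle=True,    # lower peak memory while loading source checkpoints
    ),
)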