File size: 328 Bytes
32ddfed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
---
# mergekit config: DARE-TIES merge of a LoRA-merged gemma-2-2b-it variant
# with the stock google/gemma-2-2b-it instruct model, using the pretrained
# google/gemma-2-2b as the common base for task-vector extraction.
models:
  - model: gemma-2-2b-it-lora-merge
    parameters:
      density: 1.0  # keep 100% of this model's delta parameters (no dropping)
      weight: 1.0
  - model: google/gemma-2-2b-it
    parameters:
      density: 1.0
      weight: 1.0
merge_method: dare_ties
base_model: google/gemma-2-2b
parameters:
  int8_mask: true
  # Fixed typo: was "nomalize", an unrecognized key that mergekit silently
  # ignores — the intended weight normalization never took effect.
  normalize: true
  weight: 1.0  # global fallback weight/density for models that omit them
  density: 1.0
dtype: bfloat16