# mergekit configuration: TIES merge of two Qwen2-72B fine-tunes
base_model: migtissera/Tess-v2.5.2-Qwen2-72B
dtype: bfloat16
merge_method: ties
parameters:
  int8_mask: 1.0   # use int8 masks during merging to reduce memory usage
  normalize: 0.0   # do not renormalize model weights to sum to 1
slices:
- sources:
  - layer_range: [0, 80]   # all 80 transformer layers of Qwen2-72B
    model: cognitivecomputations/dolphin-2.9.2-qwen2-72b
    parameters:
      density: 0.5   # keep 50% of this model's delta parameters (TIES sparsification)
      weight: 0.5    # equal contribution to the merged weights
  - layer_range: [0, 80]
    model: migtissera/Tess-v2.5.2-Qwen2-72B
    parameters:
      density: 0.5
      weight: 0.5
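
# A minimal sketch of how a config like this is typically applied, assuming the
# mergekit package is installed (pip install mergekit) and that this file is
# saved as mergekit_config.yml (the file name and output directory below are
# assumptions, not part of the original config):
#
#   mergekit-yaml ./mergekit_config.yml ./merged-model --cuda
#
# mergekit-yaml reads the configuration above, fetches the listed models, and
# writes the merged checkpoint to the given output directory.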