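# mergekit configuration: DARE-TIES merge of three Llama-3.1-70B-based models
# onto nitky/Llama-3.1-SuperSwallow-70B-Instruct-v0.1 as the base model.
# A minimal sketch of how this might be run (file and output paths below are
# placeholders, assuming the mergekit CLI is installed):
#   mergekit-yaml ./this_config.yml ./merged-model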
base_model: nitky/Llama-3.1-SuperSwallow-70B-Instruct-v0.1
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
slices:
- sources:
  - layer_range: [0, 80]
    model: nitky/Llama-3.1-SuperSwallow-70B-Instruct-v0.1
  - layer_range: [0, 80]
    model: NousResearch/Hermes-3-Llama-3.1-70B
    parameters:
      density: 0.53
      weight: 0.3
  - layer_range: [0, 80]
    model: Saxo/Linkbricks-Horizon-AI-Japanese-Advanced-V4-70B
    parameters:
      density: 0.53
      weight: 0.4
  - layer_range: [0, 80]
    model: MaziyarPanahi/calme-2.3-llama3.1-70b
    parameters:
      density: 0.53
      weight: 0.3
tokenizer_source: union