base_model: meta-llama/Meta-Llama-3-8B-Instruct
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  normalize: false
slices:
- sources:
  - layer_range: [0, 32]
    model: meta-llama/Meta-Llama-3-8B-Instruct
  - layer_range: [0, 32]
    model: meta-llama/Meta-Llama-3-8B-Instruct
    parameters:
      weight: 1.0
  - layer_range: [0, 32]
    model: failspy/Llama-3-8B-Instruct-MopeyMule
    parameters:
      weight: -1.0
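
This config uses mergekit's task_arithmetic method: each source model's task vector (model minus base_model) is scaled by its weight and added back to the base. With normalize: false and the base model identical to the weight-1.0 source, the merge over layers 0-32 works out to roughly Instruct - (MopeyMule - Instruct), i.e. the MopeyMule task vector is subtracted from the Instruct weights.

A minimal usage sketch, assuming mergekit is installed and the config above is saved as config.yaml (filename and output path here are illustrative):

mergekit-yaml config.yaml ./merged-model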