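# mergekit passthrough merge configuration.
# Passthrough stacks the listed layer ranges in the order given, without
# interpolating weights, to assemble a single deeper model from the source
# checkpoints.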
slices:
- sources:
  - layer_range: [0, 16]
    model: codellama/CodeLlama-7b-Instruct-hf
- sources:
  - layer_range: [2, 22]
    model: Phind/Phind-CodeLlama-34B-Python-v1
- sources:
  - layer_range: [8, 26]
    model: codellama/CodeLlama-7b-Python-hf
- sources:
  - layer_range: [10, 30]
    model: Phind/Phind-CodeLlama-34B-v2
- sources:
  - layer_range: [14, 32]
    model: Phind/Phind-CodeLlama-34B-v2
merge_method: passthrough
dtype: float16
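
# Usage sketch (assumptions: mergekit is installed, e.g. `pip install mergekit`,
# this file is saved locally as config.yml, and the output path is illustrative):
#   mergekit-yaml config.yml ./merged-model --cuda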