# L3.3-Prikol-70B-v0.4 / mergekit_config.yml
# Uploaded by Nohobby via huggingface_hub (commit 3e671da, verified; 717 bytes).
# mergekit SCE merge of five Llama-3.3-70B-family models.
# select_topk controls the fraction of highest-variance parameters each model
# contributes; a list of per-layer-group values may be given via `- value: [...]`
# (mergekit interpolates the gradient across layers), or a single scalar applies
# uniformly. The base model carries no select_topk entry here and no explicit
# weight — NOTE(review): unsloth/DeepSeek-R1-Distill-Llama-70B also has no
# parameters; confirm that defaulting is intended rather than an omission.
models:
  - model: unsloth/DeepSeek-R1-Distill-Llama-70B
  - model: ArliAI/Llama-3.3-70B-ArliAI-RPMax-v1.4
    parameters:
      select_topk:
        # Layer-wise gradient: mid layers contribute more than the ends.
        - value: [0.18, 0.3, 0.32, 0.38, 0.32, 0.3]
  - model: Nohobby/AbominationSnowPig
    parameters:
      select_topk:
        # Small, front-loaded contribution (5-point gradient).
        - value: [0.1, 0.06, 0.05, 0.05, 0.08]
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
    parameters:
      select_topk: 0.17
  - model: mergekit-community/L3.3-L3.1-NewTempusBlated-70B
    parameters:
      select_topk: 0.55
# The base model also appears in `models` above with its own select_topk share.
base_model: mergekit-community/L3.3-L3.1-NewTempusBlated-70B
merge_method: sce
parameters:
  int8_mask: true   # compute task-vector masks in int8 to save memory
  rescale: true
  normalize: true
dtype: float32      # merge arithmetic precision
out_dtype: bfloat16 # precision of the saved merged weights
tokenizer_source: base