---
# mergekit configuration: linear (weighted-average) merge of seven models,
# all with equal weight 1.0, output in float16.
#
# NOTE(review): the original was collapsed onto a single line and was not
# valid YAML; structure reconstructed below with all keys/values unchanged.
#
# NOTE(review): disinfozone/Disinfo4_mistral-ft-optimized-1218 appears to be
# a Mistral-based model while the others are Llama-3-8B variants — a linear
# merge normally requires identical architectures/tensor shapes. Confirm
# this model is actually merge-compatible with the rest.
models:
  - model: chujiezheng/LLaMA3-iterative-DPO-final-ExPO
    parameters:
      weight: 1.0
  - model: disinfozone/Disinfo4_mistral-ft-optimized-1218
    parameters:
      weight: 1.0
  - model: failspy/Meta-Llama-3-8B-Instruct-abliterated-v3
    parameters:
      weight: 1.0
  - model: flammenai/Mahou-1.2a-llama3-8B
    parameters:
      weight: 1.0
  - model: FPHam/L3-8B-Everything-COT
    parameters:
      weight: 1.0
  - model: lemon07r/Llama-3-RedMagic2-8B
    parameters:
      weight: 1.0
  - model: grimjim/Llama-3-Perky-Pat-Instruct-8B
    parameters:
      weight: 1.0
merge_method: linear
dtype: float16