models:
  # === Core Instruction Following (35% weight allocation) ===
  - model: qingy2024/MwM-22B-Instruct
    parameters:
      weight: 1.35   # Increased for stronger instruction adherence
      density: 0.82  # Sparse enough for creative deviation
  - model: DigitalSouls/BlackSheep-DigitalSoul-22B
    parameters:
      weight: 0.85   # Maintains dark thematic elements
      density: 0.88  # Higher retention for personality consistency
  - model: ArliAI/Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: 0.75   # Boosted for anti-repetition training
      density: 0.83  # Preserve diverse RP patterns

  # === Narrative Engine (45% weight allocation) ===
  - model: Kaoeiri/MS-Magpantheonsel-lark-v4x1.6.2RP-Cydonia-vXXX-22B-8
    parameters:
      weight: 0.72   # Balanced mythos building
      density: 0.68  # Allow concept recombination
  - model: TheDrummer/Cydonia-22B-v1.3
    parameters:
      weight: 0.40   # Increased for plot continuity
      density: 0.65  # Encourage narrative surprises
  - model: Gryphe/Pantheon-RP-Pure-1.6.2-22b-Small
    parameters:
      weight: 0.55   # Maintains pure RP characteristics
      density: 0.70  # Standard retention

  # === Support Matrix (20% weight allocation) ===
  - model: anthracite-org/magnum-v4-22b
    parameters:
      weight: 0.60   # Boosted for atmospheric depth
      density: 0.72  # Preserve environmental descriptors
  - model: Saxo/Linkbricks-Horizon-AI-Korean-Superb-22B
    parameters:
      weight: 0.35   # Cultural nuance preservation
      density: 0.75  # High retention for language features
  - model: allura-org/MS-Meadowlark-22B
    parameters:
      weight: 0.38   # Slight boost for natural dialogue flow
      density: 0.68
  # ... (other support models adjusted similarly with 0.3-0.4 weights)

merge_method: dare_ties
base_model: unsloth/Mistral-Small-Instruct-2409
parameters:
  density: 0.85    # Global default: keep ~85% of deltas (mild sparsity) for creative recombination
  epsilon: 0.12    # Higher resurrection rate for narrative parameters
  lambda: 1.22     # Amplifies merged deltas slightly (controlled divergence)
  normalize: true
  t:
    - filter: "self_attn"     # Stable attention structure
      value: 0.92
    - filter: "mlp"           # Chaotic concept mixing
      value: 0.68
    - filter: "embed_tokens"  # Cultural preservation
      value: 0.95
dtype: bfloat16
random_seed: 314159  # First digits of pi; a fixed seed keeps the merge reproducible
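To make the per-model `weight` and `density` values above concrete, here is a minimal numpy sketch of the DARE-TIES idea they tune: each fine-tune's delta from the base is randomly sparsified and rescaled (DARE), then a per-parameter sign election keeps only agreeing contributions (TIES). This is an illustration of the published algorithm, not mergekit's internal implementation; the function names, toy tensors, and normalization choice (disjoint mean over agreeing models) are assumptions made for the example.

import numpy as np

def dare_sparsify(delta, density, rng):
    """Drop-And-REscale: keep each delta element with probability
    `density`, then rescale survivors by 1/density so the expected
    magnitude of the task vector is preserved."""
    mask = rng.random(delta.shape) < density
    return np.where(mask, delta / density, 0.0)

def dare_ties_merge(base, finetuned, weights, densities, seed=314159):
    """Merge fine-tuned tensors onto `base`: weight * DARE(delta, density)
    per model, then TIES-style sign election discards contributions that
    disagree with the majority direction for each parameter."""
    rng = np.random.default_rng(seed)
    deltas = [
        w * dare_sparsify(ft - base, d, rng)
        for ft, w, d in zip(finetuned, weights, densities)
    ]
    stacked = np.stack(deltas)                   # (n_models, *shape)
    elected_sign = np.sign(stacked.sum(axis=0))  # majority direction
    agree = np.sign(stacked) == elected_sign     # mask disagreeing deltas
    kept = np.where(agree, stacked, 0.0)
    counts = np.maximum(agree.sum(axis=0), 1)    # avoid divide-by-zero
    return base + kept.sum(axis=0) / counts

# Toy usage: three "fine-tunes" of a 4-parameter tensor, reusing the
# weights/densities of the first three models in the config above.
base = np.zeros(4)
finetuned = [base + np.array([0.5, -0.2, 0.1, 0.0]),
             base + np.array([0.4,  0.3, 0.0, 0.1]),
             base + np.array([0.6, -0.1, 0.2, 0.0])]
print(dare_ties_merge(base, finetuned,
                      weights=[1.35, 0.85, 0.75],
                      densities=[0.82, 0.88, 0.83]))

In practice the config file itself is handed to mergekit (for example via its mergekit-yaml command-line entry point) rather than reimplemented; the sketch only shows why higher `density` retains more of a model's behavior and why `weight` scales how strongly it pulls the merge.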