```yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  epsilon: 0.1
  lambda: 1.0
  int8_mask: true
  normalize: true

# Base model
base_model: ZeroXClem/Llama3.1-Hermes3-SuperNova-8B-L3.1-Purosani-2-8B

# Models to merge, including the base model itself again
models:
  - model: ZeroXClem/Llama3.1-Hermes3-SuperNova-8B-L3.1-Purosani-2-8B
    parameters:
      weight: 1
      density: 0.5
  - model: Casual-Autopsy/L3-bluuwhale-SAO-MIX-8B-V1_fp32-merge-calc
    parameters:
      weight: 1
      density: 0.55
```
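For reference, below is a minimal sketch of how a config like this can be applied with mergekit's Python entry point. The file name `della-merge.yaml`, the output directory, and the option values are placeholders chosen for illustration, not the exact invocation used to produce this model.

```python
# Minimal sketch: load the YAML config above and run the della_linear merge
# with mergekit. Assumes `pip install mergekit` and that the config is saved
# as della-merge.yaml (hypothetical file name).
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "della-merge.yaml"  # hypothetical path to the config shown above
OUTPUT_PATH = "./merged-model"   # hypothetical output directory

# Parse and validate the merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the resulting model to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
    ),
)
```

Equivalently, the `mergekit-yaml` command-line tool accepts the same config file and an output path.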