ECE-PRYMMAL-YL-1B-SLERP-V2 / mergekit_config.yml
slices:
  - sources:
      - model: fblgit/miniclaus-qw1.5B-UNAMGS
        layer_range: [0, 28]  # Adjust based on layer compatibility and model size
      - model: Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v1
        layer_range: [0, 28]
merge_method: slerp
base_model: fblgit/miniclaus-qw1.5B-UNAMGS
parameters:
  t:
    - filter: self_attn
      value: [0, 0.25, 0.5, 0.75, 1]  # Layer-wise gradient; adjust to match the merging strategy used by high-ranked models
    - filter: mlp
      value: [1, 0.75, 0.5, 0.25, 0]
    - value: 0.65  # Default interpolation weight for all remaining tensors, tuned for balance
dtype: bfloat16
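
A config like this is consumed by mergekit; the usual entry point is the mergekit-yaml CLI (for example, mergekit-yaml mergekit_config.yml ./output-model --cuda). Below is a minimal sketch of the equivalent programmatic call, assuming mergekit's Python API as described in its README (MergeConfiguration, MergeOptions, run_merge); the output directory name is illustrative.

# Minimal sketch: run this SLERP merge via mergekit's Python API.
# Assumptions: mergekit and torch are installed, and the names below
# (MergeConfiguration, MergeOptions, run_merge) follow the mergekit README;
# the output directory is illustrative.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"            # the file shown above
OUTPUT_PATH = "./ECE-PRYMMAL-YL-1B-SLERP-V2"  # illustrative output directory

# Parse the YAML into mergekit's validated configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge; the merged bfloat16 model and a copied tokenizer are
# written to OUTPUT_PATH. CUDA is used when available.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

Under mergekit's slerp semantics (t = 0 keeps the base model, t = 1 takes the other model), the gradients above blend attention tensors increasingly toward the Josiefied model in deeper layers and MLP tensors in the opposite direction, with all remaining tensors merged at a flat 0.65.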