Llama-3-Alpha-Ko-8B-Evo / mergekit_config.yml
base_model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
- sources:
  - layer_range: [0, 8]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
    parameters:
      density: 0.7215873011769947
      weight: 0.5672371220537422
  - layer_range: [0, 8]
    model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
    parameters:
      density: 0.6535475316726772
      weight: 0.4297404766011409
  - layer_range: [0, 8]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
- sources:
  - layer_range: [8, 16]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
    parameters:
      density: 1.0
      weight: 1.258044681241929
  - layer_range: [8, 16]
    model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
    parameters:
      density: 0.6160642519526411
      weight: 0.4418461544593263
  - layer_range: [8, 16]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
- sources:
  - layer_range: [16, 24]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
    parameters:
      density: 0.5618652598821763
      weight: 0.42351796523690527
  - layer_range: [16, 24]
    model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
    parameters:
      density: 0.932056672842011
      weight: 0.3737246575450384
  - layer_range: [16, 24]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
- sources:
  - layer_range: [24, 32]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
    parameters:
      density: 0.3934598670872163
      weight: 0.003967952578762479
  - layer_range: [24, 32]
    model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
    parameters:
      density: 1.0
      weight: 0.4946465398634956
  - layer_range: [24, 32]
    model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
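This config describes a dare_ties merge of Meta-Llama-3-8B-Instruct and Llama-3-Open-Ko-8B onto a Meta-Llama-3-8B base, with per-slice density and weight values over four layer ranges. For reference, the snippet below is a minimal sketch of how such a config is applied with mergekit's Python API; it assumes mergekit is installed and the input_models checkpoints referenced above exist locally, and "./merged" is a hypothetical output directory, not part of this repository.

    # Minimal sketch: run this mergekit config via the library's Python API.
    # Assumes `pip install mergekit` and that the ./evolve-test-2/input_models/*
    # checkpoints referenced in the config are present locally.
    import yaml
    import torch

    from mergekit.config import MergeConfiguration
    from mergekit.merge import MergeOptions, run_merge

    # Parse the YAML above into mergekit's validated config object.
    with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
        merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

    run_merge(
        merge_config,
        out_path="./merged",                 # hypothetical output directory
        options=MergeOptions(
            cuda=torch.cuda.is_available(),  # merge on GPU when available
            copy_tokenizer=True,             # carry the tokenizer into the output
            lazy_unpickle=False,
            low_cpu_memory=False,
        ),
    )

Equivalently, the mergekit-yaml command line (e.g. mergekit-yaml mergekit_config.yml ./merged --cuda) performs the same merge without any Python scripting.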