---
# mergekit configuration: DARE-TIES merge of Meta-Llama-3-8B-Instruct and
# Llama-3-Open-Ko-8B onto a Meta-Llama-3-8B base, in four 8-layer slices.
# NOTE(review): indentation reconstructed — the source had all nesting
# flattened, which is invalid YAML (duplicate keys, sequence entries mixed
# with mapping keys at top level). All values are preserved verbatim.
base_model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
  # Layers 0-7
  - sources:
      - layer_range: [0, 8]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
        parameters:
          density: 0.7215873011769947
          weight: 0.5672371220537422
      - layer_range: [0, 8]
        model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
        parameters:
          density: 0.6535475316726772
          weight: 0.4297404766011409
      # base model source — no density/weight parameters
      - layer_range: [0, 8]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
  # Layers 8-15
  - sources:
      - layer_range: [8, 16]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
        parameters:
          density: 1.0
          # weight > 1 is permitted by dare_ties (task-vector scaling)
          weight: 1.258044681241929
      - layer_range: [8, 16]
        model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
        parameters:
          density: 0.6160642519526411
          weight: 0.4418461544593263
      - layer_range: [8, 16]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
  # Layers 16-23
  - sources:
      - layer_range: [16, 24]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
        parameters:
          density: 0.5618652598821763
          weight: 0.42351796523690527
      - layer_range: [16, 24]
        model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
        parameters:
          density: 0.932056672842011
          weight: 0.3737246575450384
      - layer_range: [16, 24]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735
  # Layers 24-31
  - sources:
      - layer_range: [24, 32]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B-Instruct_531182157
        parameters:
          density: 0.3934598670872163
          weight: 0.003967952578762479
      - layer_range: [24, 32]
        model: ./evolve-test-2/input_models/Llama-3-Open-Ko-8B_4015212464
        parameters:
          density: 1.0
          weight: 0.4946465398634956
      - layer_range: [24, 32]
        model: ./evolve-test-2/input_models/Meta-Llama-3-8B_3118938735