base_model:
  model:
    path: mistralai/Mixtral-8x7B-v0.1
dtype: bfloat16
merge_method: dare_ties
slices:
  - sources:
      - layer_range: [0, 32]
        model:
          lora:
            path: Doctor-Shotgun/limarp-zloss-mixtral-8x7b-qlora
          model:
            path: mistralai/Mixtral-8x7B-v0.1
        parameters:
          density: 0.52
          weight: 0.22
      - layer_range: [0, 32]
        model:
          model:
            path: Sao10K/Sensualize-Mixtral-bf16
        parameters:
          density: 0.52
          weight: 0.22
      - layer_range: [0, 32]
        model:
          model:
            path: mistralai/Mixtral-8x7B-Instruct-v0.1
        parameters:
          density: 0.6
          weight: 1.0
      - layer_range: [0, 32]
        model:
          model:
            path: jondurbin/bagel-dpo-8x7b-v0.2
        parameters:
          density: 0.6
          weight: 0.5
      - layer_range: [0, 32]
        model:
          model:
            path: mistralai/Mixtral-8x7B-v0.1
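To actually produce the merged model, this config can be fed to mergekit. Below is a minimal sketch using the Python entry points shown in mergekit's README (`MergeConfiguration`, `MergeOptions`, `run_merge`); the filename `mixtral-dare-ties.yml` and the output directory are placeholders, and option names may differ slightly between mergekit versions. The equivalent CLI call would be `mergekit-yaml mixtral-dare-ties.yml ./merged-mixtral --cuda`.

```python
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mixtral-dare-ties.yml"  # placeholder: the YAML config above saved to disk
OUTPUT_PATH = "./merged-mixtral"      # placeholder: where the merged weights are written

# Parse and validate the merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_ties merge. The limarp-zloss QLoRA is folded into its
# Mixtral base before merging; lora_merge_cache controls where that
# intermediate checkpoint is stored.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer to the output
        lora_merge_cache="/tmp",
    ),
)
```

Note that every source uses `layer_range: [0, 32]`, i.e. all 32 Mixtral layers, so this is a whole-model merge; the final source is the unmodified base model, which dare_ties uses as the reference that the weighted, sparsified deltas from the other models are applied to.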