Llama-3-8b-ita-slerp / mergekit_config.yml
slices:
- sources:
  - model: swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA
    layer_range:
    - 0
    - 32
  - model: DeepMount00/Llama-3-8b-Ita
    layer_range:
    - 0
    - 32
merge_method: slerp
base_model: swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA
parameters:
  t:
  - filter: self_attn
    value:
    - 0
    - 0.5
    - 0.3
    - 0.7
    - 1
  - filter: mlp
    value:
    - 1
    - 0.5
    - 0.7
    - 0.3
    - 0
  - value: 0.5
dtype: bfloat16
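
The config merges the two models with SLERP: self_attn weights follow the t schedule [0, 0.5, 0.3, 0.7, 1], mlp weights follow [1, 0.5, 0.7, 0.3, 0], and everything else uses the default t of 0.5. Below is a minimal Python sketch of how such a schedule can be applied, assuming the anchor values are spread evenly over the 0-32 layer range and linearly interpolated for intermediate layers; this is an illustration under that assumption, not mergekit's actual implementation.

    # Conceptual sketch (not mergekit's code): SLERP between two weight tensors,
    # with the per-layer interpolation factor t taken from an anchor schedule
    # like the ones in the config above.
    import numpy as np

    def layer_t(layer_idx: int, num_layers: int, anchors: list[float]) -> float:
        """Map a layer index to a t value by linearly interpolating
        between anchor values spread evenly over the layer range."""
        position = layer_idx / max(num_layers - 1, 1)            # 0.0 .. 1.0
        anchor_positions = np.linspace(0.0, 1.0, len(anchors))   # where anchors sit
        return float(np.interp(position, anchor_positions, anchors))

    def slerp(t: float, v0: np.ndarray, v1: np.ndarray, eps: float = 1e-8) -> np.ndarray:
        """Spherical linear interpolation between two flattened weight tensors."""
        a, b = v0.ravel(), v1.ravel()
        cos_omega = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b) + eps)
        omega = np.arccos(np.clip(cos_omega, -1.0, 1.0))
        if omega < eps:  # nearly parallel: fall back to linear interpolation
            return (1.0 - t) * v0 + t * v1
        so = np.sin(omega)
        return (np.sin((1.0 - t) * omega) / so) * v0 + (np.sin(t * omega) / so) * v1

    # Example: a self_attn tensor in layer 16 of 32 with the schedule from the config.
    t = layer_t(16, 32, [0, 0.5, 0.3, 0.7, 1])
    merged = slerp(t, np.random.randn(4, 4), np.random.randn(4, 4))
    print(f"t={t:.3f}")

To actually run the merge, the config file is passed to mergekit's CLI, e.g. mergekit-yaml mergekit_config.yml ./output-directory (see the mergekit README for available options).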