# Mixtral_7Bx2_MoE / moe.yaml
# mergekit MoE merge configuration: builds a 2x7B Mixtral-style MoE from two
# expert models on top of a shared base. Each expert's positive_prompts are
# used to initialize its router gate (with no gate_mode set, mergekit defaults
# to "hidden"): prompts resembling an expert's list route tokens to that expert.
base_model: rwitz2/go-bruins-v2.1.1
experts:
  # Expert 1: conversational / roleplay specialist.
  - source_model: NurtureAI/neural-chat-7b-v3-16k
    positive_prompts:
      - "adventure"
      - "roleplay"
      - "friend"
      - "chat"
      - "companion"
      - "[Mode: Roleplay]"
  # Expert 2: DPO-tuned model handling the remaining modes.
  - source_model: mncai/mistral-7b-dpo-v6
    positive_prompts:
      - "[Mode: Occultism]"
      - "[Mode: Mathematics]"