# dolphin-laserxtral-4x7B / mergekit_moe_config.yml
# Uploaded by ehartford via huggingface_hub (commit 1c9cd96, verified; 887 bytes)
---
# mergekit-moe config: builds a 4x7B Mixture-of-Experts model.
# Tokens are routed to experts by hidden-state affinity with each
# expert's positive_prompts (gate_mode: hidden).
base_model: mlabonne/Marcoro14-7B-slerp
gate_mode: hidden  # one of "hidden", "cheap_embed", or "random"
dtype: bfloat16  # output dtype (float32, float16, or bfloat16)
experts:
  # General chat / instruction following
  - source_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
    positive_prompts:
      - "chat"
      - "assistant"
      - "tell me"
      - "explain"
  # Coding
  - source_model: beowolx/CodeNinja-1.0-OpenChat-7B
    positive_prompts:
      - "code"
      - "python"
      - "javascript"
      - "programming"
      - "algorithm"
  # Creative writing / role-play
  - source_model: maywell/PiVoT-0.1-Starling-LM-RP
    positive_prompts:
      - "storywriting"
      - "write"
      - "scene"
      - "story"
      - "character"
  # Math / reasoning
  - source_model: WizardLM/WizardMath-7B-V1.1
    positive_prompts:
      - "reason"
      - "math"
      - "mathematics"
      - "solve"
      - "count"

# To run this:
#   mergekit-moe --trust-remote-code --random-seed 0 ./moe_laser.yml ./moe_laser