phixtral-2x2_8 / mergekit_moe_config.yml
# mergekit-moe config for phixtral-2x2_8: a 2-expert Mixture-of-Experts model
# assembled from two Phi-2 fine-tunes.
base_model: cognitivecomputations/dolphin-2_6-phi-2  # supplies the shared (non-expert) weights
gate_mode: cheap_embed  # initialize the routers from raw token embeddings of the prompts
experts:
  - source_model: cognitivecomputations/dolphin-2_6-phi-2
    positive_prompts: [""]  # empty prompt: no topic-based gate specialization for this expert
  - source_model: lxuechen/phi-2-dpo
    positive_prompts: [""]
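
# Usage: a minimal sketch, assuming mergekit is installed with its MoE
# support (e.g. `pip install mergekit`) and exposes the mergekit-moe entry
# point. It reads this config and writes the merged MoE model to the given
# output directory; the output path below is illustrative, not part of
# this repo.
#
#   mergekit-moe mergekit_moe_config.yml ./phixtral-2x2_8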