---
# mergekit configuration: DARE-TIES merge of four Mistral-7B fine-tunes
# onto the Open-Orca/Mistral-7B-SlimOrca base model.
base_model: Open-Orca/Mistral-7B-SlimOrca
dtype: bfloat16
merge_method: dare_ties

models:
  - model: cognitivecomputations/openchat-3.5-0106-laser
    parameters:
      # density: fraction of delta-weights retained before DARE rescaling
      density: 0.74
      # weight given as a list is interpolated across layer depth (gradient).
      # NOTE(review): this entry has 3 anchor points while the others have 2 —
      # presumably intentional (starts this model at 0 for early layers), but
      # confirm the asymmetry is deliberate.
      weight: [0, 0.2, 0.4]
  - model: Nexusflow/Starling-LM-7B-beta
    parameters:
      density: 0.74
      weight: [0.3, 0.4]
  - model: cognitivecomputations/samantha-1.1-westlake-7b-laser
    parameters:
      density: 0.74
      weight: [0.3, 0.4]
  - model: cognitivecomputations/dolphin-2.2.1-mistral-7b
    parameters:
      density: 0.74
      weight: [0.3, 0.4]

# global merge parameters
parameters:
  int8_mask: true   # compute task-vector masks in int8 to reduce memory
  normalize: true   # renormalize merged weights so per-tensor weights sum to 1