---
# mergekit configuration: DARE-TIES merge of three 7B chat models onto
# Mistral-7B-Instruct-v0.2.
#
# dare_ties: each non-base model's task vector (delta vs. base_model) is
# randomly pruned to `density`, rescaled, sign-consensus merged (TIES), and
# blended into the base with the given `weight`.
base_model: mistralai/Mistral-7B-Instruct-v0.2
dtype: bfloat16
merge_method: dare_ties
models:
  # The base model is listed without parameters: dare_ties uses it purely as
  # the reference point for the other models' task-vector deltas.
  - model: mistralai/Mistral-7B-Instruct-v0.2
  - model: Nexusflow/Starling-LM-7B-beta
    parameters:
      density: 0.53
      weight: 0.4
  - model: mlabonne/NeuralBeagle14-7B
    parameters:
      density: 0.53
      weight: 0.3
  - model: CorticalStack/pastiche-crown-clown-7b-dare-dpo
    parameters:
      density: 0.53
      weight: 0.3
# Global merge parameters. int8_mask stores the delta sign masks as int8 to
# reduce memory use during the merge.
parameters:
  int8_mask: true