models:
  - model: google/gemma-2-9b-it
  - model: wzhouad/gemma-2-9b-it-WPO-HB
    parameters:
      density: 0.65
      weight: 1.00
  - model: princeton-nlp/gemma-2-9b-it-SimPO
    parameters:
      density: 1.0
      weight: 0.00001
merge_method: della
base_model: google/gemma-2-9b-it
parameters:
  normalize: true
  int8_mask: true
  lambda: 1.0
  epsilon: 0.1
dtype: float16
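
For reference, a minimal sketch of how a configuration like this could be applied with mergekit's Python API (`MergeConfiguration` and `run_merge`); the config file name, output directory, and merge options shown here are placeholder assumptions, not necessarily the exact settings used to produce this model.

```python
# Sketch only: assumes mergekit's Python API (MergeConfiguration, run_merge);
# paths and options are illustrative, not the exact settings used for this merge.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the DELLA merge configuration shown above (hypothetical file name).
with open("della_merge.yaml", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Run the merge and write the merged model to a local directory.
run_merge(
    merge_config,
    "./gemma-2-9b-it-della-merge",  # hypothetical output path
    options=MergeOptions(
        cuda=True,            # use a GPU for the merge if one is available
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
        lazy_unpickle=True,   # reduce peak memory while loading checkpoints
    ),
)
```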