# NeuralZephyr-Beagle-7B / mergekit_config.yml
models:
  - model: /Users/akim/ma/models/NeuralTrix-7B-dpo  # base model doesn't need any parameters
  - model: /Users/akim/ma/models/zephyr-7b-alpha
    parameters:
      density: 0.83  # fraction of this model's delta parameters retained by DARE
      weight: 0.4    # relative contribution of this model to the merged weights
  - model: /Users/akim/ma/models/NeuralBeagle14-7B
    parameters:
      density: 0.83
      weight: 0.6
merge_method: dare_ties  # DARE dropout/rescaling of task vectors with TIES sign election
base_model: /Users/akim/ma/models/NeuralTrix-7B-dpo
parameters:
  int8_mask: true  # store intermediate masks in int8 to reduce memory use
dtype: bfloat16    # carry out the merge and save the result in bfloat16
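
# A minimal sketch of running this config with the mergekit CLI (the local
# model paths above and the output directory below are assumptions; adjust
# them to your environment):
#
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./NeuralZephyr-Beagle-7B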