---
# This is the config.yml for ABC_Books/test002
# mergekit DARE-TIES merge: ten 7B storytelling/instruct models, equal weights.
models:
  - model: KnutJaegersberg/Mistral-7B-EssayWriter
    parameters:
      weight: 0.1
      density: 0.9
  - model: MaziyarPanahi/Mistral-7B-claude-instruct-Mistral-7B-Instruct-v0.2-slerp
    parameters:
      weight: 0.1
      density: 0.9
  - model: ajibawa-2023/General-Stories-Mistral-7B
    parameters:
      weight: 0.1
      density: 0.9
  - model: ajibawa-2023/Young-Children-Storyteller-Mistral-7B
    parameters:
      weight: 0.1
      density: 0.9
  - model: jdqwoi/TooManyMixRolePlay-7B-Story_V3.5
    parameters:
      weight: 0.1
      density: 0.9
  - model: kasper52786/StoryWeaver-7b-Instruct-v0.1
    parameters:
      weight: 0.1
      density: 0.9
  - model: luozhuanggary/GOAT-v0.2-Mistral-7B-Claude
    parameters:
      weight: 0.1
      density: 0.9
  - model: scribis/Fantastica-7b-Instruct-0.2-Italian_merged
    parameters:
      weight: 0.1
      density: 0.9
  - model: tdh87/StoryTeller7b-meh
    parameters:
      weight: 0.1
      density: 0.9
  - model: MrRobotoAI/Test001a
    parameters:
      weight: 0.1
      density: 0.9

merge_method: dare_ties

# Test001a best exemplifies the closest match to all of the features needed in
# the final model, so it becomes the base model for merges.
base_model: MrRobotoAI/Test001a

# Global merge parameters (apply to the whole merge, not per-model).
parameters:
  normalize: true
  int8_mask: true

dtype: float16