merge_method: task_arithmetic
base_model: technicolor
parameters:
  normalize: true
models:
  - model: technicolor+jeiku/Theory_of_Mind_Roleplay_Mistral
    parameters:
      weight: 1
  - model: technicolor+jeiku/Theory_of_Mind_Mistral
    parameters:
      weight: 1
  - model: technicolor+jeiku/Gnosis_Reformatted_Mistral
    parameters:
      weight: 1
  - model: technicolor+Undi95/Mistral-7B-small_pippa_limaRP-v3-lora
    parameters:
      weight: 1
dtype: float16
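
For context, `task_arithmetic` merging adds weighted task vectors (the difference between each fine-tuned model and the base) back onto the base model; with `normalize: true` the weights are rescaled to sum to 1 before being applied. The sketch below is illustrative only, not mergekit's actual implementation, and the tensor-dict interface it assumes is hypothetical:

```python
# Minimal sketch of task-arithmetic merging (illustrative, assumes each model
# is given as a dict mapping parameter name -> torch.Tensor).
import torch


def task_arithmetic_merge(base, finetuned_models, weights, normalize=True):
    """Merge by adding weighted task vectors (finetuned - base) to the base."""
    if normalize:
        total = sum(weights)
        weights = [w / total for w in weights]  # normalize: true rescales weights

    merged = {}
    for name, base_param in base.items():
        # Task vector for each fine-tune, scaled by its weight, then summed.
        delta = sum(
            w * (ft[name] - base_param)
            for ft, w in zip(finetuned_models, weights)
        )
        merged[name] = base_param + delta
    return merged
```

With the config above saved as, say, config.yml, the merge itself would normally be produced by mergekit (e.g. `mergekit-yaml config.yml ./merged-model`, assuming mergekit is installed) rather than by hand-written code like this.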