---
# Intermediate merge "monk": SLERP of two Mistral-Nemo 12B fine-tunes.
# Referenced by name from the final dare_ties merge in this file.
name: monk
models:
  - model: nbeerbower/mistral-nemo-bophades-12B
  - model: nbeerbower/mistral-nemo-wissenschaft-12B
merge_method: slerp
# SLERP interpolates between base_model and the other listed model.
base_model: nbeerbower/mistral-nemo-bophades-12B
parameters:
  # Per-layer-group interpolation factor; peaks at 0.6 in the middle layers
  # and tapers toward the ends, so outer layers stay closer to the base.
  t: [0.1, 0.2, 0.4, 0.6, 0.6, 0.4, 0.2, 0.1]
dtype: bfloat16
# Take the tokenizer from base_model rather than merging vocabularies.
tokenizer_source: base
---
# Intermediate merge "hero": SLERP of Chronos-Gold and Sunrose 12B models.
# Same t schedule as "monk" above; consumed by the final dare_ties merge.
name: hero
models:
  - model: elinas/Chronos-Gold-12B-1.0
  - model: Fizzarolli/MN-12b-Sunrose
merge_method: slerp
base_model: elinas/Chronos-Gold-12B-1.0
parameters:
  # Interpolation factor per layer group: strongest blend (0.6) mid-stack,
  # tapering to 0.1 at both ends.
  t: [0.1, 0.2, 0.4, 0.6, 0.6, 0.4, 0.2, 0.1]
dtype: bfloat16
tokenizer_source: base
---
# Intermediate merge "deity": SLERP of gutenberg-v4 and magnum-kto 12B models.
# Note the shallower t schedule (max 0.25, zero at the ends) compared with
# "monk"/"hero" — this merge stays much closer to its base_model.
name: deity
models:
  - model: nbeerbower/mistral-nemo-gutenberg-12B-v4
  - model: anthracite-org/magnum-12b-v2.5-kto
merge_method: slerp
base_model: nbeerbower/mistral-nemo-gutenberg-12B-v4
parameters:
  # Endpoints pinned to 0: first and last layer groups are taken entirely
  # from base_model.
  t: [0, 0.1, 0.2, 0.25, 0.25, 0.2, 0.1, 0]
dtype: bfloat16
tokenizer_source: base
---

# Final merge: DARE-TIES over the three intermediate merges defined above
# ("monk", "hero", "deity") plus the raw ChatML base, all across the full
# 40-layer stack of Mistral-Nemo 12B.
base_model: IntervitensInc/Mistral-Nemo-Base-2407-chatml
merge_method: dare_ties
slices:
- sources:
  # density = fraction of delta weights retained per model (DARE drop rate
  # is 1 - density); weight = relative contribution when summing deltas.
  - layer_range: [0, 40]
    model: monk
    parameters:
      density: 0.7
      weight: 0.5
  # "hero" is the dominant contributor: densest retention, full weight.
  - layer_range: [0, 40]
    model: hero
    parameters:
      density: 0.9
      weight: 1.0
  - layer_range: [0, 40]
    model: deity
    parameters:
      density: 0.5
      weight: 0.7
  # Base model included as a source with no parameters — presumably to
  # anchor the merge toward the base; confirm against mergekit dare_ties
  # defaults for sources without density/weight.
  - layer_range: [0, 40]
    model: IntervitensInc/Mistral-Nemo-Base-2407-chatml
tokenizer_source: base