
# JOSIE_Beta-4-7B-slerp

JOSIE_Beta-4-7B-slerp is a merge of the following models using LazyMergekit:

* [Weyaxi/Einstein-v4-7B](https://huggingface.co/Weyaxi/Einstein-v4-7B)
* [cognitivecomputations/dolphin-2.8-experiment26-7b](https://huggingface.co/cognitivecomputations/dolphin-2.8-experiment26-7b)

IMPORTANT!!!

According to the evaluation results on the Open LLM Leaderboard, this is my second-best performing model; the best one is Beta 3. The detailed results are below:

```json
{
    "all": {
        "acc": 0.6395082113582112,
        "acc_stderr": 0.0321581453772247,
        "acc_norm": 0.6406802207782241,
        "acc_norm_stderr": 0.032807499003593296,
        "mc1": 0.379436964504284,
        "mc1_stderr": 0.01698703926614299,
        "mc2": 0.5593281467190233,
        "mc2_stderr": 0.0156123862411416
    },
    "harness|arc:challenge|25": {
        "acc": 0.6143344709897611,
        "acc_stderr": 0.014224250973257182,
        "acc_norm": 0.6356655290102389,
        "acc_norm_stderr": 0.014063260279882419
    },
    "harness|hellaswag|10": {
        "acc": 0.6618203545110536,
        "acc_stderr": 0.004721231637092722,
        "acc_norm": 0.8409679346743677,
        "acc_norm_stderr": 0.0036495858528211842
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.34,
        "acc_stderr": 0.047609522856952365,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.047609522856952365
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6,
        "acc_stderr": 0.04232073695151589,
        "acc_norm": 0.6,
        "acc_norm_stderr": 0.04232073695151589
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6973684210526315,
        "acc_stderr": 0.037385206761196686,
        "acc_norm": 0.6973684210526315,
        "acc_norm_stderr": 0.037385206761196686
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.6,
        "acc_stderr": 0.04923659639173309,
        "acc_norm": 0.6,
        "acc_norm_stderr": 0.04923659639173309
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6943396226415094,
        "acc_stderr": 0.028353298073322666,
        "acc_norm": 0.6943396226415094,
        "acc_norm_stderr": 0.028353298073322666
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7638888888888888,
        "acc_stderr": 0.03551446610810826,
        "acc_norm": 0.7638888888888888,
        "acc_norm_stderr": 0.03551446610810826
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.46,
        "acc_stderr": 0.05009082659620333,
        "acc_norm": 0.46,
        "acc_norm_stderr": 0.05009082659620333
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.52,
        "acc_stderr": 0.050211673156867795,
        "acc_norm": 0.52,
        "acc_norm_stderr": 0.050211673156867795
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.32,
        "acc_stderr": 0.04688261722621504,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.04688261722621504
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6705202312138728,
        "acc_stderr": 0.03583901754736412,
        "acc_norm": 0.6705202312138728,
        "acc_norm_stderr": 0.03583901754736412
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.3627450980392157,
        "acc_stderr": 0.04784060704105654,
        "acc_norm": 0.3627450980392157,
        "acc_norm_stderr": 0.04784060704105654
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.75,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.75,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5574468085106383,
        "acc_stderr": 0.03246956919789958,
        "acc_norm": 0.5574468085106383,
        "acc_norm_stderr": 0.03246956919789958
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.4473684210526316,
        "acc_stderr": 0.04677473004491199,
        "acc_norm": 0.4473684210526316,
        "acc_norm_stderr": 0.04677473004491199
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5586206896551724,
        "acc_stderr": 0.04137931034482758,
        "acc_norm": 0.5586206896551724,
        "acc_norm_stderr": 0.04137931034482758
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.41005291005291006,
        "acc_stderr": 0.025331202438944444,
        "acc_norm": 0.41005291005291006,
        "acc_norm_stderr": 0.025331202438944444
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.42857142857142855,
        "acc_stderr": 0.0442626668137991,
        "acc_norm": 0.42857142857142855,
        "acc_norm_stderr": 0.0442626668137991
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695235,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695235
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7709677419354839,
        "acc_stderr": 0.023904914311782648,
        "acc_norm": 0.7709677419354839,
        "acc_norm_stderr": 0.023904914311782648
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.5073891625615764,
        "acc_stderr": 0.035176035403610105,
        "acc_norm": 0.5073891625615764,
        "acc_norm_stderr": 0.035176035403610105
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.69,
        "acc_stderr": 0.04648231987117316,
        "acc_norm": 0.69,
        "acc_norm_stderr": 0.04648231987117316
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7818181818181819,
        "acc_stderr": 0.03225078108306289,
        "acc_norm": 0.7818181818181819,
        "acc_norm_stderr": 0.03225078108306289
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.8080808080808081,
        "acc_stderr": 0.028057791672989017,
        "acc_norm": 0.8080808080808081,
        "acc_norm_stderr": 0.028057791672989017
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.8963730569948186,
        "acc_stderr": 0.02199531196364424,
        "acc_norm": 0.8963730569948186,
        "acc_norm_stderr": 0.02199531196364424
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6435897435897436,
        "acc_stderr": 0.02428314052946731,
        "acc_norm": 0.6435897435897436,
        "acc_norm_stderr": 0.02428314052946731
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.32592592592592595,
        "acc_stderr": 0.028578348365473072,
        "acc_norm": 0.32592592592592595,
        "acc_norm_stderr": 0.028578348365473072
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6764705882352942,
        "acc_stderr": 0.030388353551886797,
        "acc_norm": 0.6764705882352942,
        "acc_norm_stderr": 0.030388353551886797
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.3576158940397351,
        "acc_stderr": 0.03913453431177258,
        "acc_norm": 0.3576158940397351,
        "acc_norm_stderr": 0.03913453431177258
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8440366972477065,
        "acc_stderr": 0.015555802713590167,
        "acc_norm": 0.8440366972477065,
        "acc_norm_stderr": 0.015555802713590167
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5324074074074074,
        "acc_stderr": 0.03402801581358966,
        "acc_norm": 0.5324074074074074,
        "acc_norm_stderr": 0.03402801581358966
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.803921568627451,
        "acc_stderr": 0.027865942286639318,
        "acc_norm": 0.803921568627451,
        "acc_norm_stderr": 0.027865942286639318
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7932489451476793,
        "acc_stderr": 0.0263616516683891,
        "acc_norm": 0.7932489451476793,
        "acc_norm_stderr": 0.0263616516683891
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6905829596412556,
        "acc_stderr": 0.03102441174057221,
        "acc_norm": 0.6905829596412556,
        "acc_norm_stderr": 0.03102441174057221
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7938931297709924,
        "acc_stderr": 0.035477710041594654,
        "acc_norm": 0.7938931297709924,
        "acc_norm_stderr": 0.035477710041594654
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.7851239669421488,
        "acc_stderr": 0.03749492448709696,
        "acc_norm": 0.7851239669421488,
        "acc_norm_stderr": 0.03749492448709696
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.8240740740740741,
        "acc_stderr": 0.036809181416738807,
        "acc_norm": 0.8240740740740741,
        "acc_norm_stderr": 0.036809181416738807
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7239263803680982,
        "acc_stderr": 0.035123852837050475,
        "acc_norm": 0.7239263803680982,
        "acc_norm_stderr": 0.035123852837050475
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.44642857142857145,
        "acc_stderr": 0.04718471485219588,
        "acc_norm": 0.44642857142857145,
        "acc_norm_stderr": 0.04718471485219588
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7766990291262136,
        "acc_stderr": 0.04123553189891431,
        "acc_norm": 0.7766990291262136,
        "acc_norm_stderr": 0.04123553189891431
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8760683760683761,
        "acc_stderr": 0.021586494001281376,
        "acc_norm": 0.8760683760683761,
        "acc_norm_stderr": 0.021586494001281376
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.73,
        "acc_stderr": 0.044619604333847394,
        "acc_norm": 0.73,
        "acc_norm_stderr": 0.044619604333847394
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8212005108556832,
        "acc_stderr": 0.01370264371536898,
        "acc_norm": 0.8212005108556832,
        "acc_norm_stderr": 0.01370264371536898
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7167630057803468,
        "acc_stderr": 0.024257901705323374,
        "acc_norm": 0.7167630057803468,
        "acc_norm_stderr": 0.024257901705323374
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.2759776536312849,
        "acc_stderr": 0.014950103002475356,
        "acc_norm": 0.2759776536312849,
        "acc_norm_stderr": 0.014950103002475356
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7189542483660131,
        "acc_stderr": 0.02573885479781873,
        "acc_norm": 0.7189542483660131,
        "acc_norm_stderr": 0.02573885479781873
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7202572347266881,
        "acc_stderr": 0.02549425935069491,
        "acc_norm": 0.7202572347266881,
        "acc_norm_stderr": 0.02549425935069491
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7314814814814815,
        "acc_stderr": 0.024659685185967284,
        "acc_norm": 0.7314814814814815,
        "acc_norm_stderr": 0.024659685185967284
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.475177304964539,
        "acc_stderr": 0.02979071924382972,
        "acc_norm": 0.475177304964539,
        "acc_norm_stderr": 0.02979071924382972
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4621903520208605,
        "acc_stderr": 0.012733671880342504,
        "acc_norm": 0.4621903520208605,
        "acc_norm_stderr": 0.012733671880342504
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6691176470588235,
        "acc_stderr": 0.028582709753898445,
        "acc_norm": 0.6691176470588235,
        "acc_norm_stderr": 0.028582709753898445
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6535947712418301,
        "acc_stderr": 0.01924978569171721,
        "acc_norm": 0.6535947712418301,
        "acc_norm_stderr": 0.01924978569171721
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6636363636363637,
        "acc_stderr": 0.04525393596302506,
        "acc_norm": 0.6636363636363637,
        "acc_norm_stderr": 0.04525393596302506
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7428571428571429,
        "acc_stderr": 0.027979823538744546,
        "acc_norm": 0.7428571428571429,
        "acc_norm_stderr": 0.027979823538744546
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.835820895522388,
        "acc_stderr": 0.026193923544454125,
        "acc_norm": 0.835820895522388,
        "acc_norm_stderr": 0.026193923544454125
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.87,
        "acc_stderr": 0.033799766898963086,
        "acc_norm": 0.87,
        "acc_norm_stderr": 0.033799766898963086
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5481927710843374,
        "acc_stderr": 0.03874371556587953,
        "acc_norm": 0.5481927710843374,
        "acc_norm_stderr": 0.03874371556587953
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8187134502923976,
        "acc_stderr": 0.029547741687640038,
        "acc_norm": 0.8187134502923976,
        "acc_norm_stderr": 0.029547741687640038
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.379436964504284,
        "mc1_stderr": 0.01698703926614299,
        "mc2": 0.5593281467190233,
        "mc2_stderr": 0.0156123862411416
    },
    "harness|winogrande|5": {
        "acc": 0.7932123125493291,
        "acc_stderr": 0.011382566829235803
    },
    "harness|gsm8k|5": {
        "acc": 0.6171341925701289,
        "acc_stderr": 0.013389223491820474
    }
}
```
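The task names follow the lm-evaluation-harness convention (`harness|task|num_fewshot`). As a quick sanity check, here is a minimal sketch that averages the `hendrycksTest` (MMLU) subtask scores from this JSON, assuming the block above is saved locally as `results.json` (a hypothetical filename):

```python
import json

# Load the evaluation results shown above (assumed to be saved as results.json)
with open("results.json") as f:
    results = json.load(f)

# Average the normalized accuracy over all MMLU (hendrycksTest) subtasks
mmlu_scores = [
    scores["acc_norm"]
    for task, scores in results.items()
    if task.startswith("harness|hendrycksTest-")
]
print(f"{len(mmlu_scores)} MMLU subtasks, mean acc_norm: {sum(mmlu_scores) / len(mmlu_scores):.4f}")
```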

## 🧩 Configuration

```yaml
slices:
  - sources:
      - model: Weyaxi/Einstein-v4-7B
        layer_range: [0, 32]
      - model: cognitivecomputations/dolphin-2.8-experiment26-7b
        layer_range: [0, 32]
merge_method: slerp
base_model: Weyaxi/Einstein-v4-7B
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
```
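For intuition, `t` is the interpolation factor between the two models' weights: 0 takes one endpoint entirely and 1 the other, with the self-attention and MLP filters swept in opposite directions across the 32 layers and 0.5 used everywhere else. The snippet below is a toy sketch of spherical linear interpolation (slerp) applied to a pair of weight tensors; it is only an illustration of the idea, not mergekit's actual implementation:

```python
import torch

def slerp(t: float, v0: torch.Tensor, v1: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Toy spherical linear interpolation between two weight tensors."""
    v0_unit = v0 / (v0.norm() + eps)
    v1_unit = v1 / (v1.norm() + eps)
    dot = torch.clamp((v0_unit * v1_unit).sum(), -1.0, 1.0)
    theta = torch.arccos(dot)              # angle between the flattened tensors
    if theta.abs() < 1e-4:                 # nearly parallel: fall back to linear interpolation
        return (1 - t) * v0 + t * v1
    sin_theta = torch.sin(theta)
    return (torch.sin((1 - t) * theta) / sin_theta) * v0 + (torch.sin(t * theta) / sin_theta) * v1

# t = 0 returns the first tensor, t = 1 the second, t = 0.5 an equal spherical blend.
a = torch.randn(4096, 4096)
b = torch.randn(4096, 4096)
merged = slerp(0.5, a, b)
print(merged.shape)
```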

## 💻 Usage

```python
# Install dependencies (notebook syntax; drop the leading "!" in a plain shell)
!pip install -qU transformers accelerate

from transformers import AutoTokenizer
import transformers
import torch

model = "Isaak-Carter/JOSIE_Beta-4-7B-slerp"
messages = [{"role": "user", "content": "What is a large language model?"}]

# Format the conversation with the model's chat template, then generate with a pipeline
tokenizer = AutoTokenizer.from_pretrained(model)
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)

outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
```
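If you prefer not to use the `pipeline` helper, below is a minimal sketch of loading the model directly with `AutoModelForCausalLM` (same model ID; assumes a GPU with enough memory for the bfloat16 weights):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

model_id = "Isaak-Carter/JOSIE_Beta-4-7B-slerp"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "What is a large language model?"}]
# apply_chat_template returns the tokenized prompt when return_tensors is set
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output = model.generate(inputs, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.95)
# Decode only the newly generated tokens
print(tokenizer.decode(output[0][inputs.shape[-1]:], skip_special_tokens=True))
```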