# J.O.S.I.E.3-Beta10-7B-slerp

J.O.S.I.E.3-Beta10-7B-slerp is a merge of the following models using LazyMergekit:

* [Weyaxi/Einstein-v5-v0.2-7B](https://huggingface.co/Weyaxi/Einstein-v5-v0.2-7B)
* [argilla/CapybaraHermes-2.5-Mistral-7B](https://huggingface.co/argilla/CapybaraHermes-2.5-Mistral-7B)

## 🧩 Configuration

```yaml
slices:
  - sources:
      - model: Weyaxi/Einstein-v5-v0.2-7B
        layer_range: [0, 32]
      - model: argilla/CapybaraHermes-2.5-Mistral-7B
        layer_range: [0, 32]
merge_method: slerp
base_model: argilla/CapybaraHermes-2.5-Mistral-7B
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
```
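
The `t` entries are gradients: each short list is stretched across the 32 layers so that every layer gets its own interpolation factor between the two source models, with separate schedules for the attention and MLP tensors and 0.5 everywhere else. The sketch below is not mergekit's actual implementation; it only illustrates, under those assumptions, how a gradient list maps to per-layer `t` values and what spherical linear interpolation (slerp) does to a pair of weight tensors.

```python
# Rough illustration only, not mergekit's code: assumes a gradient list is
# spread evenly over the 32 layers and each tensor pair is combined by slerp.
import numpy as np

def gradient_to_layer_values(gradient, num_layers=32):
    """Linearly interpolate a short gradient list onto one value per layer."""
    anchors = np.linspace(0, num_layers - 1, num=len(gradient))
    return np.interp(np.arange(num_layers), anchors, gradient)

def slerp(t, v0, v1, eps=1e-8):
    """Spherical linear interpolation between two flattened weight tensors."""
    u0 = v0 / (np.linalg.norm(v0) + eps)
    u1 = v1 / (np.linalg.norm(v1) + eps)
    omega = np.arccos(np.clip(np.dot(u0, u1), -1.0, 1.0))  # angle between the tensors
    if np.sin(omega) < eps:                                 # (near-)parallel: fall back to lerp
        return (1 - t) * v0 + t * v1
    return (np.sin((1 - t) * omega) * v0 + np.sin(t * omega) * v1) / np.sin(omega)

# Per-layer t for the self_attn tensors under the config above:
print(np.round(gradient_to_layer_values([0, 0.5, 0.3, 0.7, 1]), 3))
```

Compared with plain linear averaging, slerp follows the arc between the two weight vectors instead of cutting across it, which is the usual motivation given for choosing it as the merge method.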

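To reproduce the merge itself, the configuration above can be fed straight to mergekit. The sketch below is a hedged illustration, not the exact command used for this model: it assumes `pip install mergekit`, that the YAML from the Configuration section is saved as `config.yaml` in the working directory, and that the output directory name `merged-model` is arbitrary.

```python
# Hedged reproduction sketch: run mergekit's CLI on the config shown above.
# Assumes config.yaml holds that YAML and that mergekit is installed; extra
# flags are omitted on purpose (see `mergekit-yaml --help` for your version).
import subprocess

subprocess.run(["mergekit-yaml", "config.yaml", "merged-model"], check=True)
```

The resulting directory can then be loaded with the Usage snippet below by pointing the model id at the local path.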
## Evaluation

Results are reported in EleutherAI LM Evaluation Harness format; the number after each task name (e.g. `|25`) is the few-shot count.

```json
{
    "all": {
        "acc": 0.6312165296664113,
        "acc_stderr": 0.03236370559394293,
        "acc_norm": 0.6324439925872714,
        "acc_norm_stderr": 0.033019786616359854,
        "mc1": 0.39657282741738065,
        "mc1_stderr": 0.017124930942023518,
        "mc2": 0.5688038233837539,
        "mc2_stderr": 0.015263125204118244
    },
    "harness|arc:challenge|25": {
        "acc": 0.6126279863481229,
        "acc_stderr": 0.014235872487909869,
        "acc_norm": 0.6348122866894198,
        "acc_norm_stderr": 0.014070265519268802
    },
    "harness|hellaswag|10": {
        "acc": 0.643397729535949,
        "acc_stderr": 0.00478016987333285,
        "acc_norm": 0.8378809002190799,
        "acc_norm_stderr": 0.0036780679944244735
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.32,
        "acc_stderr": 0.046882617226215034,
        "acc_norm": 0.32,
        "acc_norm_stderr": 0.046882617226215034
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6074074074074074,
        "acc_stderr": 0.0421850621536888,
        "acc_norm": 0.6074074074074074,
        "acc_norm_stderr": 0.0421850621536888
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6907894736842105,
        "acc_stderr": 0.037610708698674805,
        "acc_norm": 0.6907894736842105,
        "acc_norm_stderr": 0.037610708698674805
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.61,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.61,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6754716981132075,
        "acc_stderr": 0.02881561571343211,
        "acc_norm": 0.6754716981132075,
        "acc_norm_stderr": 0.02881561571343211
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7291666666666666,
        "acc_stderr": 0.03716177437566017,
        "acc_norm": 0.7291666666666666,
        "acc_norm_stderr": 0.03716177437566017
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.47,
        "acc_stderr": 0.05016135580465919,
        "acc_norm": 0.47,
        "acc_norm_stderr": 0.05016135580465919
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.48,
        "acc_stderr": 0.050211673156867795,
        "acc_norm": 0.48,
        "acc_norm_stderr": 0.050211673156867795
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695235,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695235
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6011560693641619,
        "acc_stderr": 0.037336266553835096,
        "acc_norm": 0.6011560693641619,
        "acc_norm_stderr": 0.037336266553835096
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.29411764705882354,
        "acc_stderr": 0.04533838195929775,
        "acc_norm": 0.29411764705882354,
        "acc_norm_stderr": 0.04533838195929775
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.72,
        "acc_stderr": 0.045126085985421276,
        "acc_norm": 0.72,
        "acc_norm_stderr": 0.045126085985421276
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5659574468085107,
        "acc_stderr": 0.03240038086792747,
        "acc_norm": 0.5659574468085107,
        "acc_norm_stderr": 0.03240038086792747
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.5,
        "acc_stderr": 0.047036043419179864,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.047036043419179864
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5448275862068965,
        "acc_stderr": 0.04149886942192117,
        "acc_norm": 0.5448275862068965,
        "acc_norm_stderr": 0.04149886942192117
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.4021164021164021,
        "acc_stderr": 0.02525303255499769,
        "acc_norm": 0.4021164021164021,
        "acc_norm_stderr": 0.02525303255499769
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.42063492063492064,
        "acc_stderr": 0.04415438226743744,
        "acc_norm": 0.42063492063492064,
        "acc_norm_stderr": 0.04415438226743744
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.39,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.39,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7774193548387097,
        "acc_stderr": 0.02366421667164251,
        "acc_norm": 0.7774193548387097,
        "acc_norm_stderr": 0.02366421667164251
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.4876847290640394,
        "acc_stderr": 0.035169204442208966,
        "acc_norm": 0.4876847290640394,
        "acc_norm_stderr": 0.035169204442208966
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.68,
        "acc_stderr": 0.04688261722621505,
        "acc_norm": 0.68,
        "acc_norm_stderr": 0.04688261722621505
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7818181818181819,
        "acc_stderr": 0.03225078108306289,
        "acc_norm": 0.7818181818181819,
        "acc_norm_stderr": 0.03225078108306289
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.803030303030303,
        "acc_stderr": 0.02833560973246336,
        "acc_norm": 0.803030303030303,
        "acc_norm_stderr": 0.02833560973246336
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.8549222797927462,
        "acc_stderr": 0.025416343096306433,
        "acc_norm": 0.8549222797927462,
        "acc_norm_stderr": 0.025416343096306433
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6435897435897436,
        "acc_stderr": 0.02428314052946731,
        "acc_norm": 0.6435897435897436,
        "acc_norm_stderr": 0.02428314052946731
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.32592592592592595,
        "acc_stderr": 0.028578348365473072,
        "acc_norm": 0.32592592592592595,
        "acc_norm_stderr": 0.028578348365473072
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6638655462184874,
        "acc_stderr": 0.030684737115135367,
        "acc_norm": 0.6638655462184874,
        "acc_norm_stderr": 0.030684737115135367
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.31788079470198677,
        "acc_stderr": 0.038020397601079024,
        "acc_norm": 0.31788079470198677,
        "acc_norm_stderr": 0.038020397601079024
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8220183486238533,
        "acc_stderr": 0.01639943636661289,
        "acc_norm": 0.8220183486238533,
        "acc_norm_stderr": 0.01639943636661289
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5185185185185185,
        "acc_stderr": 0.034076320938540516,
        "acc_norm": 0.5185185185185185,
        "acc_norm_stderr": 0.034076320938540516
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.803921568627451,
        "acc_stderr": 0.027865942286639318,
        "acc_norm": 0.803921568627451,
        "acc_norm_stderr": 0.027865942286639318
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.7974683544303798,
        "acc_stderr": 0.026160568246601453,
        "acc_norm": 0.7974683544303798,
        "acc_norm_stderr": 0.026160568246601453
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6995515695067265,
        "acc_stderr": 0.03076935200822914,
        "acc_norm": 0.6995515695067265,
        "acc_norm_stderr": 0.03076935200822914
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7480916030534351,
        "acc_stderr": 0.03807387116306085,
        "acc_norm": 0.7480916030534351,
        "acc_norm_stderr": 0.03807387116306085
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.8016528925619835,
        "acc_stderr": 0.036401182719909456,
        "acc_norm": 0.8016528925619835,
        "acc_norm_stderr": 0.036401182719909456
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.8055555555555556,
        "acc_stderr": 0.038260763248848646,
        "acc_norm": 0.8055555555555556,
        "acc_norm_stderr": 0.038260763248848646
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.754601226993865,
        "acc_stderr": 0.03380939813943354,
        "acc_norm": 0.754601226993865,
        "acc_norm_stderr": 0.03380939813943354
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.44642857142857145,
        "acc_stderr": 0.04718471485219588,
        "acc_norm": 0.44642857142857145,
        "acc_norm_stderr": 0.04718471485219588
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7961165048543689,
        "acc_stderr": 0.039891398595317706,
        "acc_norm": 0.7961165048543689,
        "acc_norm_stderr": 0.039891398595317706
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8589743589743589,
        "acc_stderr": 0.02280138253459754,
        "acc_norm": 0.8589743589743589,
        "acc_norm_stderr": 0.02280138253459754
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.73,
        "acc_stderr": 0.044619604333847394,
        "acc_norm": 0.73,
        "acc_norm_stderr": 0.044619604333847394
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8084291187739464,
        "acc_stderr": 0.014072859310451949,
        "acc_norm": 0.8084291187739464,
        "acc_norm_stderr": 0.014072859310451949
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7312138728323699,
        "acc_stderr": 0.023868003262500104,
        "acc_norm": 0.7312138728323699,
        "acc_norm_stderr": 0.023868003262500104
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.24916201117318434,
        "acc_stderr": 0.014465893829859924,
        "acc_norm": 0.24916201117318434,
        "acc_norm_stderr": 0.014465893829859924
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7124183006535948,
        "acc_stderr": 0.02591780611714716,
        "acc_norm": 0.7124183006535948,
        "acc_norm_stderr": 0.02591780611714716
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7106109324758842,
        "acc_stderr": 0.025755865922632945,
        "acc_norm": 0.7106109324758842,
        "acc_norm_stderr": 0.025755865922632945
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.6975308641975309,
        "acc_stderr": 0.02555765398186806,
        "acc_norm": 0.6975308641975309,
        "acc_norm_stderr": 0.02555765398186806
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.49645390070921985,
        "acc_stderr": 0.02982674915328092,
        "acc_norm": 0.49645390070921985,
        "acc_norm_stderr": 0.02982674915328092
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4745762711864407,
        "acc_stderr": 0.01275371692910101,
        "acc_norm": 0.4745762711864407,
        "acc_norm_stderr": 0.01275371692910101
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6507352941176471,
        "acc_stderr": 0.028959755196824862,
        "acc_norm": 0.6507352941176471,
        "acc_norm_stderr": 0.028959755196824862
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6323529411764706,
        "acc_stderr": 0.019506291693954843,
        "acc_norm": 0.6323529411764706,
        "acc_norm_stderr": 0.019506291693954843
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6363636363636364,
        "acc_stderr": 0.046075820907199756,
        "acc_norm": 0.6363636363636364,
        "acc_norm_stderr": 0.046075820907199756
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7183673469387755,
        "acc_stderr": 0.028795185574291293,
        "acc_norm": 0.7183673469387755,
        "acc_norm_stderr": 0.028795185574291293
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.835820895522388,
        "acc_stderr": 0.026193923544454125,
        "acc_norm": 0.835820895522388,
        "acc_norm_stderr": 0.026193923544454125
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.87,
        "acc_stderr": 0.033799766898963086,
        "acc_norm": 0.87,
        "acc_norm_stderr": 0.033799766898963086
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5180722891566265,
        "acc_stderr": 0.03889951252827216,
        "acc_norm": 0.5180722891566265,
        "acc_norm_stderr": 0.03889951252827216
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8187134502923976,
        "acc_stderr": 0.029547741687640038,
        "acc_norm": 0.8187134502923976,
        "acc_norm_stderr": 0.029547741687640038
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.39657282741738065,
        "mc1_stderr": 0.017124930942023518,
        "mc2": 0.5688038233837539,
        "mc2_stderr": 0.015263125204118244
    },
    "harness|winogrande|5": {
        "acc": 0.7963693764798737,
        "acc_stderr": 0.011317798781626918
    },
    "harness|gsm8k|5": {
        "acc": 0.6103108415466262,
        "acc_stderr": 0.01343312323611072
    }
}
```
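
If you want to re-check any of these numbers locally, a hedged sketch using the lm-evaluation-harness library interface follows (assumes `pip install lm-eval`; task names, few-shot handling, and therefore scores drift between harness versions, so small deviations from the results above are expected).

```python
# Hedged sketch: re-run the 25-shot ARC-Challenge evaluation with the
# lm-evaluation-harness library API (assumes lm_eval >= 0.4). Scoring details
# differ across harness versions, so numbers may not match exactly.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=Isaak-Carter/J.O.S.I.E.3-Beta10-7B-slerp,dtype=bfloat16",
    tasks=["arc_challenge"],
    num_fewshot=25,
)
print(results["results"]["arc_challenge"])
```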

## 💻 Usage

```python
!pip install -qU transformers accelerate

from transformers import AutoTokenizer
import transformers
import torch

model = "Isaak-Carter/J.O.S.I.E.3-Beta10-7B-slerp"
messages = [{"role": "user", "content": "What is a large language model?"}]

# Render the chat messages with the model's chat template, then sample a reply.
tokenizer = AutoTokenizer.from_pretrained(model)
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)

outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
```
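
If you prefer calling `generate` directly instead of the pipeline helper, a minimal sketch follows (same repository id and sampling settings as above; assumes the tokenizer ships a chat template and that `accelerate` is installed for `device_map="auto"`).

```python
# Minimal direct-generation sketch; same sampling settings as the pipeline example.
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

model_id = "Isaak-Carter/J.O.S.I.E.3-Beta10-7B-slerp"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")

messages = [{"role": "user", "content": "What is a large language model?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output_ids = model.generate(
    input_ids, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95
)
# Strip the prompt tokens so only the newly generated reply is printed.
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```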