---
license: apache-2.0
datasets:
  - HuggingFaceH4/ultrachat_200k
  - Intel/orca_dpo_pairs
language:
  - en
pipeline_tag: text-generation
tags:
  - gpt2
  - dpo
---

# chat_gpt2_dpo

This model is a fine-tuned version of [Sharathhebbar24/chat_gpt2](https://huggingface.co/Sharathhebbar24/chat_gpt2), trained with DPO (Direct Preference Optimization) on the [Intel/orca_dpo_pairs](https://huggingface.co/datasets/Intel/orca_dpo_pairs) dataset.
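For reference, DPO fine-tuning of this kind can be set up with the `trl` library. The sketch below is a minimal, illustrative outline rather than the exact script used for this model: it assumes `trl`'s `DPOTrainer`/`DPOConfig` and the `system`/`question`/`chosen`/`rejected` columns of Intel/orca_dpo_pairs, and argument names vary between `trl` versions.

```python
# Minimal DPO fine-tuning sketch (illustrative; not the exact script used for this model).
# Assumes a recent trl release; some argument names (e.g. tokenizer vs. processing_class)
# differ across trl versions.
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import DPOConfig, DPOTrainer

base = "Sharathhebbar24/chat_gpt2"                       # SFT starting point
model = AutoModelForCausalLM.from_pretrained(base)
ref_model = AutoModelForCausalLM.from_pretrained(base)   # frozen reference policy
tokenizer = AutoTokenizer.from_pretrained(base)
tokenizer.pad_token = tokenizer.eos_token

# Map orca_dpo_pairs rows into the prompt / chosen / rejected format DPOTrainer expects.
# The "user:" / "assistant:" prompt layout here mirrors the usage example below and is an assumption.
def to_dpo_format(row):
    return {
        "prompt": f"{row['system']}\nuser: {row['question']}\nassistant: ",
        "chosen": row["chosen"],
        "rejected": row["rejected"],
    }

dataset = load_dataset("Intel/orca_dpo_pairs", split="train").map(to_dpo_format)

args = DPOConfig(output_dir="chat_gpt2_dpo", beta=0.1, per_device_train_batch_size=2)
trainer = DPOTrainer(
    model=model,
    ref_model=ref_model,
    args=args,
    train_dataset=dataset,
    tokenizer=tokenizer,
)
trainer.train()
```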

## Model description

GPT-2 is a transformer model pre-trained on a very large corpus of English data in a self-supervised fashion. This means it was pre-trained on raw text only, with no human labelling of any kind (which is why it can use lots of publicly available data), using an automatic process to generate inputs and labels from the text. More precisely, it was trained to guess the next word in sentences.

Inputs are sequences of continuous text of a certain length, and the targets are the same sequences shifted one token (word or piece of a word) to the right. The model uses a masking mechanism to make sure the prediction for token `i` only uses the inputs from `1` to `i` and not the future tokens.

This way, the model learns an inner representation of the English language that can then be used to extract features useful for downstream tasks. The model is, however, best at what it was trained for: generating text from a prompt.
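As a concrete illustration of the shifted-target objective described above, here is a small, self-contained sketch (not part of the original training code): with `transformers`, passing `labels=input_ids` to a causal LM makes the library perform the one-token shift internally when computing the next-token cross-entropy loss.

```python
# Illustration of the next-token objective: the labels are the input ids themselves,
# and the library shifts them by one position internally when computing the loss.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

text = "GPT-2 is trained to guess the next word in sentences."
inputs = tokenizer(text, return_tensors="pt")

# For token i, the causal mask ensures the prediction only sees tokens 1..i.
outputs = model(**inputs, labels=inputs["input_ids"])
print(f"next-token cross-entropy loss: {outputs.loss.item():.3f}")
```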

## To use this model

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "Sharathhebbar24/chat_gpt2_dpo"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def generate_text(prompt):
    # Tokenize the prompt and generate up to 64 new tokens beyond it.
    # (max_new_tokens is used instead of max_length so the prompt length does not eat the budget.)
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    outputs = model.generate(inputs, max_new_tokens=64, pad_token_id=tokenizer.eos_token_id)
    generated = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Trim the output at the last complete sentence.
    return generated[:generated.rfind(".") + 1]

prompt = """
user: what are you?
assistant: I am a Chatbot intended to give a python program
user: hmm, can you write a python program to print Hii Heloo
assistant: Sure Here is a python code.\n print("Hii Heloo")
user: Can you write a Linear search program in python
"""
res = generate_text(prompt)
print(res)
```
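The model expects plain-text chat transcripts with `user:` and `assistant:` turns, as in the prompt above. To continue a conversation, you can append the generated reply and the next user turn and call `generate_text` again; the follow-up message below is purely hypothetical.

```python
# Hypothetical follow-up turn: append the model's reply and the next user message, then generate again.
prompt = prompt + res + "\nuser: Can you also add comments to that program?\nassistant: "
print(generate_text(prompt))
```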

## Benchmark / Evaluation

| Model | Average | ARC | HellaSwag | MMLU | TruthfulQA | Winogrande | GSM8K |
|---|---|---|---|---|---|---|---|
| Sharathhebbar24/chat_gpt2_dpo | 28.56 | 23.98 | 31.22 | 24.95 | 41.26 | 49.96 | 0 |
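The detailed scores below come from the EleutherAI lm-evaluation-harness (each `harness|...` key names the task and its few-shot count). A minimal sketch of how a single task from this suite could be re-run, assuming the `lm_eval` Python API of lm-evaluation-harness v0.4+ (task names and argument names may differ between harness versions):

```python
# Re-running one benchmark task with lm-evaluation-harness (v0.4+ API assumed).
# Task and argument names may differ in other harness versions.
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=Sharathhebbar24/chat_gpt2_dpo",
    tasks=["hellaswag"],   # evaluated 10-shot in the results below
    num_fewshot=10,
)
print(results["results"]["hellaswag"])
```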
```json
{
    "all": {
        "acc": 0.24915779048270345,
        "acc_stderr": 0.030509906389610868,
        "acc_norm": 0.25041231816215265,
        "acc_norm_stderr": 0.03132600249114931,
        "mc1": 0.2521419828641371,
        "mc1_stderr": 0.015201522246299965,
        "mc2": 0.41257163824244014,
        "mc2_stderr": 0.015127188811834062
    },
    "harness|arc:challenge|25": {
        "acc": 0.18686006825938567,
        "acc_stderr": 0.011391015649694391,
        "acc_norm": 0.23976109215017063,
        "acc_norm_stderr": 0.012476304127453954
    },
    "harness|hellaswag|10": {
        "acc": 0.28978291177056364,
        "acc_stderr": 0.004527343651130803,
        "acc_norm": 0.3121888070105557,
        "acc_norm_stderr": 0.0046243936909668975
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.22,
        "acc_stderr": 0.04163331998932268,
        "acc_norm": 0.22,
        "acc_norm_stderr": 0.04163331998932268
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.3037037037037037,
        "acc_stderr": 0.039725528847851375,
        "acc_norm": 0.3037037037037037,
        "acc_norm_stderr": 0.039725528847851375
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.17763157894736842,
        "acc_stderr": 0.031103182383123398,
        "acc_norm": 0.17763157894736842,
        "acc_norm_stderr": 0.031103182383123398
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.26,
        "acc_stderr": 0.0440844002276808,
        "acc_norm": 0.26,
        "acc_norm_stderr": 0.0440844002276808
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.23018867924528302,
        "acc_stderr": 0.025907897122408173,
        "acc_norm": 0.23018867924528302,
        "acc_norm_stderr": 0.025907897122408173
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.2569444444444444,
        "acc_stderr": 0.03653946969442099,
        "acc_norm": 0.2569444444444444,
        "acc_norm_stderr": 0.03653946969442099
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.19,
        "acc_stderr": 0.039427724440366234,
        "acc_norm": 0.19,
        "acc_norm_stderr": 0.039427724440366234
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.24,
        "acc_stderr": 0.04292346959909283,
        "acc_norm": 0.24,
        "acc_norm_stderr": 0.04292346959909283
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.29,
        "acc_stderr": 0.04560480215720684,
        "acc_norm": 0.29,
        "acc_norm_stderr": 0.04560480215720684
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.2543352601156069,
        "acc_stderr": 0.0332055644308557,
        "acc_norm": 0.2543352601156069,
        "acc_norm_stderr": 0.0332055644308557
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.21568627450980393,
        "acc_stderr": 0.04092563958237654,
        "acc_norm": 0.21568627450980393,
        "acc_norm_stderr": 0.04092563958237654
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.34,
        "acc_stderr": 0.04760952285695236,
        "acc_norm": 0.34,
        "acc_norm_stderr": 0.04760952285695236
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.26382978723404255,
        "acc_stderr": 0.028809989854102973,
        "acc_norm": 0.26382978723404255,
        "acc_norm_stderr": 0.028809989854102973
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.24561403508771928,
        "acc_stderr": 0.04049339297748142,
        "acc_norm": 0.24561403508771928,
        "acc_norm_stderr": 0.04049339297748142
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.2413793103448276,
        "acc_stderr": 0.03565998174135302,
        "acc_norm": 0.2413793103448276,
        "acc_norm_stderr": 0.03565998174135302
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.24074074074074073,
        "acc_stderr": 0.02201908001221789,
        "acc_norm": 0.24074074074074073,
        "acc_norm_stderr": 0.02201908001221789
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.1349206349206349,
        "acc_stderr": 0.030557101589417515,
        "acc_norm": 0.1349206349206349,
        "acc_norm_stderr": 0.030557101589417515
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.16,
        "acc_stderr": 0.03684529491774708,
        "acc_norm": 0.16,
        "acc_norm_stderr": 0.03684529491774708
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.1774193548387097,
        "acc_stderr": 0.02173254068932927,
        "acc_norm": 0.1774193548387097,
        "acc_norm_stderr": 0.02173254068932927
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.24630541871921183,
        "acc_stderr": 0.030315099285617736,
        "acc_norm": 0.24630541871921183,
        "acc_norm_stderr": 0.030315099285617736
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.28,
        "acc_stderr": 0.04512608598542126,
        "acc_norm": 0.28,
        "acc_norm_stderr": 0.04512608598542126
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.21818181818181817,
        "acc_stderr": 0.03225078108306289,
        "acc_norm": 0.21818181818181817,
        "acc_norm_stderr": 0.03225078108306289
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.3282828282828283,
        "acc_stderr": 0.03345678422756776,
        "acc_norm": 0.3282828282828283,
        "acc_norm_stderr": 0.03345678422756776
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.37305699481865284,
        "acc_stderr": 0.03490205592048573,
        "acc_norm": 0.37305699481865284,
        "acc_norm_stderr": 0.03490205592048573
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.26666666666666666,
        "acc_stderr": 0.02242127361292371,
        "acc_norm": 0.26666666666666666,
        "acc_norm_stderr": 0.02242127361292371
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.21481481481481482,
        "acc_stderr": 0.025040443877000683,
        "acc_norm": 0.21481481481481482,
        "acc_norm_stderr": 0.025040443877000683
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.22268907563025211,
        "acc_stderr": 0.027025433498882364,
        "acc_norm": 0.22268907563025211,
        "acc_norm_stderr": 0.027025433498882364
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.23178807947019867,
        "acc_stderr": 0.034454062719870546,
        "acc_norm": 0.23178807947019867,
        "acc_norm_stderr": 0.034454062719870546
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.3302752293577982,
        "acc_stderr": 0.02016446633634298,
        "acc_norm": 0.3302752293577982,
        "acc_norm_stderr": 0.02016446633634298
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.19444444444444445,
        "acc_stderr": 0.026991454502036733,
        "acc_norm": 0.19444444444444445,
        "acc_norm_stderr": 0.026991454502036733
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.25,
        "acc_stderr": 0.03039153369274154,
        "acc_norm": 0.25,
        "acc_norm_stderr": 0.03039153369274154
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.26582278481012656,
        "acc_stderr": 0.028756799629658342,
        "acc_norm": 0.26582278481012656,
        "acc_norm_stderr": 0.028756799629658342
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.17937219730941703,
        "acc_stderr": 0.0257498195691928,
        "acc_norm": 0.17937219730941703,
        "acc_norm_stderr": 0.0257498195691928
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.2366412213740458,
        "acc_stderr": 0.037276735755969174,
        "acc_norm": 0.2366412213740458,
        "acc_norm_stderr": 0.037276735755969174
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.35537190082644626,
        "acc_stderr": 0.04369236326573981,
        "acc_norm": 0.35537190082644626,
        "acc_norm_stderr": 0.04369236326573981
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.25925925925925924,
        "acc_stderr": 0.042365112580946336,
        "acc_norm": 0.25925925925925924,
        "acc_norm_stderr": 0.042365112580946336
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.2822085889570552,
        "acc_stderr": 0.03536117886664742,
        "acc_norm": 0.2822085889570552,
        "acc_norm_stderr": 0.03536117886664742
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.32142857142857145,
        "acc_stderr": 0.04432804055291519,
        "acc_norm": 0.32142857142857145,
        "acc_norm_stderr": 0.04432804055291519
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.1941747572815534,
        "acc_stderr": 0.03916667762822585,
        "acc_norm": 0.1941747572815534,
        "acc_norm_stderr": 0.03916667762822585
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.2905982905982906,
        "acc_stderr": 0.02974504857267404,
        "acc_norm": 0.2905982905982906,
        "acc_norm_stderr": 0.02974504857267404
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.25,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.25,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.23627075351213284,
        "acc_stderr": 0.015190473717037497,
        "acc_norm": 0.23627075351213284,
        "acc_norm_stderr": 0.015190473717037497
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.24566473988439305,
        "acc_stderr": 0.02317629820399201,
        "acc_norm": 0.24566473988439305,
        "acc_norm_stderr": 0.02317629820399201
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.24581005586592178,
        "acc_stderr": 0.014400296429225587,
        "acc_norm": 0.24581005586592178,
        "acc_norm_stderr": 0.014400296429225587
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.25163398692810457,
        "acc_stderr": 0.024848018263875195,
        "acc_norm": 0.25163398692810457,
        "acc_norm_stderr": 0.024848018263875195
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.18006430868167203,
        "acc_stderr": 0.021823422857744953,
        "acc_norm": 0.18006430868167203,
        "acc_norm_stderr": 0.021823422857744953
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.25617283950617287,
        "acc_stderr": 0.024288533637726095,
        "acc_norm": 0.25617283950617287,
        "acc_norm_stderr": 0.024288533637726095
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.2801418439716312,
        "acc_stderr": 0.02678917235114023,
        "acc_norm": 0.2801418439716312,
        "acc_norm_stderr": 0.02678917235114023
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.24837027379400262,
        "acc_stderr": 0.011035212598034503,
        "acc_norm": 0.24837027379400262,
        "acc_norm_stderr": 0.011035212598034503
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.3125,
        "acc_stderr": 0.02815637344037142,
        "acc_norm": 0.3125,
        "acc_norm_stderr": 0.02815637344037142
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.25,
        "acc_stderr": 0.01751781884501444,
        "acc_norm": 0.25,
        "acc_norm_stderr": 0.01751781884501444
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.18181818181818182,
        "acc_stderr": 0.03694284335337801,
        "acc_norm": 0.18181818181818182,
        "acc_norm_stderr": 0.03694284335337801
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.31020408163265306,
        "acc_stderr": 0.029613459872484378,
        "acc_norm": 0.31020408163265306,
        "acc_norm_stderr": 0.029613459872484378
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.24875621890547264,
        "acc_stderr": 0.030567675938916707,
        "acc_norm": 0.24875621890547264,
        "acc_norm_stderr": 0.030567675938916707
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.25,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.25,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.19879518072289157,
        "acc_stderr": 0.03106939026078942,
        "acc_norm": 0.19879518072289157,
        "acc_norm_stderr": 0.03106939026078942
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.29239766081871343,
        "acc_stderr": 0.034886477134579215,
        "acc_norm": 0.29239766081871343,
        "acc_norm_stderr": 0.034886477134579215
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.2521419828641371,
        "mc1_stderr": 0.015201522246299965,
        "mc2": 0.41257163824244014,
        "mc2_stderr": 0.015127188811834062
    },
    "harness|winogrande|5": {
        "acc": 0.4996053670086819,
        "acc_stderr": 0.014052481306049512
    },
    "harness|gsm8k|5": {
        "acc": 0.0,
        "acc_stderr": 0.0
    }
}
```