mtasic85 committed
Commit 3fd6374
1 Parent(s): 4b097de
Files changed (3)
  1. LM_EVAL.md +0 -80
  2. README.md +88 -1
  3. scripts/TRAIN.md +6 -0
LM_EVAL.md DELETED
@@ -1,80 +0,0 @@
- | Tasks |Version| Filter |n-shot| Metric | |Value | |Stderr|
- |---------------------------------------|------:|----------------|-----:|-----------|---|-----:|---|-----:|
- |arc_challenge | 1|none | 0|acc |↑ |0.1937|± |0.0115|
- | | |none | 0|acc_norm |↑ |0.2363|± |0.0124|
- |gsm8k | 3|flexible-extract| 5|exact_match|↑ |0.0136|± |0.0032|
- | | |strict-match | 5|exact_match|↑ |0.0000|± |0.0000|
- |hellaswag | 1|none | 0|acc |↑ |0.2659|± |0.0044|
- | | |none | 0|acc_norm |↑ |0.2709|± |0.0044|
- |mmlu | 2|none | |acc |↑ |0.2309|± |0.0036|
- | - humanities | 2|none | |acc |↑ |0.2370|± |0.0062|
- | - formal_logic | 1|none | 0|acc |↑ |0.2778|± |0.0401|
- | - high_school_european_history | 1|none | 0|acc |↑ |0.2303|± |0.0329|
- | - high_school_us_history | 1|none | 0|acc |↑ |0.2402|± |0.0300|
- | - high_school_world_history | 1|none | 0|acc |↑ |0.2405|± |0.0278|
- | - international_law | 1|none | 0|acc |↑ |0.1983|± |0.0364|
- | - jurisprudence | 1|none | 0|acc |↑ |0.2315|± |0.0408|
- | - logical_fallacies | 1|none | 0|acc |↑ |0.1840|± |0.0304|
- | - moral_disputes | 1|none | 0|acc |↑ |0.2110|± |0.0220|
- | - moral_scenarios | 1|none | 0|acc |↑ |0.2380|± |0.0142|
- | - philosophy | 1|none | 0|acc |↑ |0.1994|± |0.0227|
- | - prehistory | 1|none | 0|acc |↑ |0.2315|± |0.0235|
- | - professional_law | 1|none | 0|acc |↑ |0.2510|± |0.0111|
- | - world_religions | 1|none | 0|acc |↑ |0.2865|± |0.0347|
- | - other | 2|none | |acc |↑ |0.2372|± |0.0076|
- | - business_ethics | 1|none | 0|acc |↑ |0.2900|± |0.0456|
- | - clinical_knowledge | 1|none | 0|acc |↑ |0.2113|± |0.0251|
- | - college_medicine | 1|none | 0|acc |↑ |0.2023|± |0.0306|
- | - global_facts | 1|none | 0|acc |↑ |0.1900|± |0.0394|
- | - human_aging | 1|none | 0|acc |↑ |0.3004|± |0.0308|
- | - management | 1|none | 0|acc |↑ |0.1748|± |0.0376|
- | - marketing | 1|none | 0|acc |↑ |0.2863|± |0.0296|
- | - medical_genetics | 1|none | 0|acc |↑ |0.2700|± |0.0446|
- | - miscellaneous | 1|none | 0|acc |↑ |0.2337|± |0.0151|
- | - nutrition | 1|none | 0|acc |↑ |0.2255|± |0.0239|
- | - professional_accounting | 1|none | 0|acc |↑ |0.2411|± |0.0255|
- | - professional_medicine | 1|none | 0|acc |↑ |0.1985|± |0.0242|
- | - virology | 1|none | 0|acc |↑ |0.2711|± |0.0346|
- | - social sciences | 2|none | |acc |↑ |0.2278|± |0.0076|
- | - econometrics | 1|none | 0|acc |↑ |0.2105|± |0.0384|
- | - high_school_geography | 1|none | 0|acc |↑ |0.1768|± |0.0272|
- | - high_school_government_and_politics| 1|none | 0|acc |↑ |0.2280|± |0.0303|
- | - high_school_macroeconomics | 1|none | 0|acc |↑ |0.2436|± |0.0218|
- | - high_school_microeconomics | 1|none | 0|acc |↑ |0.2395|± |0.0277|
- | - high_school_psychology | 1|none | 0|acc |↑ |0.2037|± |0.0173|
- | - human_sexuality | 1|none | 0|acc |↑ |0.2595|± |0.0384|
- | - professional_psychology | 1|none | 0|acc |↑ |0.2386|± |0.0172|
- | - public_relations | 1|none | 0|acc |↑ |0.2091|± |0.0390|
- | - security_studies | 1|none | 0|acc |↑ |0.2490|± |0.0277|
- | - sociology | 1|none | 0|acc |↑ |0.1990|± |0.0282|
- | - us_foreign_policy | 1|none | 0|acc |↑ |0.3100|± |0.0465|
- | - stem | 2|none | |acc |↑ |0.2185|± |0.0074|
- | - abstract_algebra | 1|none | 0|acc |↑ |0.2600|± |0.0441|
- | - anatomy | 1|none | 0|acc |↑ |0.1630|± |0.0319|
- | - astronomy | 1|none | 0|acc |↑ |0.2237|± |0.0339|
- | - college_biology | 1|none | 0|acc |↑ |0.2708|± |0.0372|
- | - college_chemistry | 1|none | 0|acc |↑ |0.2300|± |0.0423|
- | - college_computer_science | 1|none | 0|acc |↑ |0.2100|± |0.0409|
- | - college_mathematics | 1|none | 0|acc |↑ |0.2200|± |0.0416|
- | - college_physics | 1|none | 0|acc |↑ |0.2647|± |0.0439|
- | - computer_security | 1|none | 0|acc |↑ |0.3000|± |0.0461|
- | - conceptual_physics | 1|none | 0|acc |↑ |0.2000|± |0.0261|
- | - electrical_engineering | 1|none | 0|acc |↑ |0.2345|± |0.0353|
- | - elementary_mathematics | 1|none | 0|acc |↑ |0.2302|± |0.0217|
- | - high_school_biology | 1|none | 0|acc |↑ |0.1903|± |0.0223|
- | - high_school_chemistry | 1|none | 0|acc |↑ |0.1527|± |0.0253|
- | - high_school_computer_science | 1|none | 0|acc |↑ |0.2700|± |0.0446|
- | - high_school_mathematics | 1|none | 0|acc |↑ |0.1926|± |0.0240|
- | - high_school_physics | 1|none | 0|acc |↑ |0.2053|± |0.0330|
- | - high_school_statistics | 1|none | 0|acc |↑ |0.2130|± |0.0279|
- | - machine_learning | 1|none | 0|acc |↑ |0.2768|± |0.0425|
- |truthfulqa_mc2 | 2|none | 0|acc |↑ |0.4683|± |0.0160|
- |winogrande | 1|none | 0|acc |↑ |0.5075|± |0.0141|
-
- | Groups |Version|Filter|n-shot|Metric| |Value | |Stderr|
- |------------------|------:|------|------|------|---|-----:|---|-----:|
- |mmlu | 2|none | |acc |↑ |0.2309|± |0.0036|
- | - humanities | 2|none | |acc |↑ |0.2370|± |0.0062|
- | - other | 2|none | |acc |↑ |0.2372|± |0.0076|
- | - social sciences| 2|none | |acc |↑ |0.2278|± |0.0076|
- | - stem | 2|none | |acc |↑ |0.2185|± |0.0074|
README.md CHANGED
@@ -22,4 +22,91 @@ tags:
  - litdata
  ---
 
- # tangled-llama-x-32k-base-v0.1
+ # tangled-llama-108m-32k-base-v0.1
+
+ ## lm-evaluation-harness
+
+ ```
+
+ ```
+
+ | Tasks |Version| Filter |n-shot| Metric | |Value | |Stderr|
+ |---------------------------------------|------:|----------------|-----:|-----------|---|-----:|---|-----:|
+ |arc_challenge | 1|none | 0|acc |↑ |0.1937|± |0.0115|
+ | | |none | 0|acc_norm |↑ |0.2363|± |0.0124|
+ |gsm8k | 3|flexible-extract| 5|exact_match|↑ |0.0136|± |0.0032|
+ | | |strict-match | 5|exact_match|↑ |0.0000|± |0.0000|
+ |hellaswag | 1|none | 0|acc |↑ |0.2659|± |0.0044|
+ | | |none | 0|acc_norm |↑ |0.2709|± |0.0044|
+ |mmlu | 2|none | |acc |↑ |0.2309|± |0.0036|
+ | - humanities | 2|none | |acc |↑ |0.2370|± |0.0062|
+ | - formal_logic | 1|none | 0|acc |↑ |0.2778|± |0.0401|
+ | - high_school_european_history | 1|none | 0|acc |↑ |0.2303|± |0.0329|
+ | - high_school_us_history | 1|none | 0|acc |↑ |0.2402|± |0.0300|
+ | - high_school_world_history | 1|none | 0|acc |↑ |0.2405|± |0.0278|
+ | - international_law | 1|none | 0|acc |↑ |0.1983|± |0.0364|
+ | - jurisprudence | 1|none | 0|acc |↑ |0.2315|± |0.0408|
+ | - logical_fallacies | 1|none | 0|acc |↑ |0.1840|± |0.0304|
+ | - moral_disputes | 1|none | 0|acc |↑ |0.2110|± |0.0220|
+ | - moral_scenarios | 1|none | 0|acc |↑ |0.2380|± |0.0142|
+ | - philosophy | 1|none | 0|acc |↑ |0.1994|± |0.0227|
+ | - prehistory | 1|none | 0|acc |↑ |0.2315|± |0.0235|
+ | - professional_law | 1|none | 0|acc |↑ |0.2510|± |0.0111|
+ | - world_religions | 1|none | 0|acc |↑ |0.2865|± |0.0347|
+ | - other | 2|none | |acc |↑ |0.2372|± |0.0076|
+ | - business_ethics | 1|none | 0|acc |↑ |0.2900|± |0.0456|
+ | - clinical_knowledge | 1|none | 0|acc |↑ |0.2113|± |0.0251|
+ | - college_medicine | 1|none | 0|acc |↑ |0.2023|± |0.0306|
+ | - global_facts | 1|none | 0|acc |↑ |0.1900|± |0.0394|
+ | - human_aging | 1|none | 0|acc |↑ |0.3004|± |0.0308|
+ | - management | 1|none | 0|acc |↑ |0.1748|± |0.0376|
+ | - marketing | 1|none | 0|acc |↑ |0.2863|± |0.0296|
+ | - medical_genetics | 1|none | 0|acc |↑ |0.2700|± |0.0446|
+ | - miscellaneous | 1|none | 0|acc |↑ |0.2337|± |0.0151|
+ | - nutrition | 1|none | 0|acc |↑ |0.2255|± |0.0239|
+ | - professional_accounting | 1|none | 0|acc |↑ |0.2411|± |0.0255|
+ | - professional_medicine | 1|none | 0|acc |↑ |0.1985|± |0.0242|
+ | - virology | 1|none | 0|acc |↑ |0.2711|± |0.0346|
+ | - social sciences | 2|none | |acc |↑ |0.2278|± |0.0076|
+ | - econometrics | 1|none | 0|acc |↑ |0.2105|± |0.0384|
+ | - high_school_geography | 1|none | 0|acc |↑ |0.1768|± |0.0272|
+ | - high_school_government_and_politics| 1|none | 0|acc |↑ |0.2280|± |0.0303|
+ | - high_school_macroeconomics | 1|none | 0|acc |↑ |0.2436|± |0.0218|
+ | - high_school_microeconomics | 1|none | 0|acc |↑ |0.2395|± |0.0277|
+ | - high_school_psychology | 1|none | 0|acc |↑ |0.2037|± |0.0173|
+ | - human_sexuality | 1|none | 0|acc |↑ |0.2595|± |0.0384|
+ | - professional_psychology | 1|none | 0|acc |↑ |0.2386|± |0.0172|
+ | - public_relations | 1|none | 0|acc |↑ |0.2091|± |0.0390|
+ | - security_studies | 1|none | 0|acc |↑ |0.2490|± |0.0277|
+ | - sociology | 1|none | 0|acc |↑ |0.1990|± |0.0282|
+ | - us_foreign_policy | 1|none | 0|acc |↑ |0.3100|± |0.0465|
+ | - stem | 2|none | |acc |↑ |0.2185|± |0.0074|
+ | - abstract_algebra | 1|none | 0|acc |↑ |0.2600|± |0.0441|
+ | - anatomy | 1|none | 0|acc |↑ |0.1630|± |0.0319|
+ | - astronomy | 1|none | 0|acc |↑ |0.2237|± |0.0339|
+ | - college_biology | 1|none | 0|acc |↑ |0.2708|± |0.0372|
+ | - college_chemistry | 1|none | 0|acc |↑ |0.2300|± |0.0423|
+ | - college_computer_science | 1|none | 0|acc |↑ |0.2100|± |0.0409|
+ | - college_mathematics | 1|none | 0|acc |↑ |0.2200|± |0.0416|
+ | - college_physics | 1|none | 0|acc |↑ |0.2647|± |0.0439|
+ | - computer_security | 1|none | 0|acc |↑ |0.3000|± |0.0461|
+ | - conceptual_physics | 1|none | 0|acc |↑ |0.2000|± |0.0261|
+ | - electrical_engineering | 1|none | 0|acc |↑ |0.2345|± |0.0353|
+ | - elementary_mathematics | 1|none | 0|acc |↑ |0.2302|± |0.0217|
+ | - high_school_biology | 1|none | 0|acc |↑ |0.1903|± |0.0223|
+ | - high_school_chemistry | 1|none | 0|acc |↑ |0.1527|± |0.0253|
+ | - high_school_computer_science | 1|none | 0|acc |↑ |0.2700|± |0.0446|
+ | - high_school_mathematics | 1|none | 0|acc |↑ |0.1926|± |0.0240|
+ | - high_school_physics | 1|none | 0|acc |↑ |0.2053|± |0.0330|
+ | - high_school_statistics | 1|none | 0|acc |↑ |0.2130|± |0.0279|
+ | - machine_learning | 1|none | 0|acc |↑ |0.2768|± |0.0425|
+ |truthfulqa_mc2 | 2|none | 0|acc |↑ |0.4683|± |0.0160|
+ |winogrande | 1|none | 0|acc |↑ |0.5075|± |0.0141|
+
+ | Groups |Version|Filter|n-shot|Metric| |Value | |Stderr|
+ |------------------|------:|------|------|------|---|-----:|---|-----:|
+ |mmlu | 2|none | |acc |↑ |0.2309|± |0.0036|
+ | - humanities | 2|none | |acc |↑ |0.2370|± |0.0062|
+ | - other | 2|none | |acc |↑ |0.2372|± |0.0076|
+ | - social sciences| 2|none | |acc |↑ |0.2278|± |0.0076|
+ | - stem | 2|none | |acc |↑ |0.2185|± |0.0074|
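
Note: the fenced block under `## lm-evaluation-harness` above is committed empty, so the exact command behind these numbers is not recorded. Given the `## Evaluate` section added to `scripts/TRAIN.md` in this same commit, the tables were most likely produced along these lines (the checkpoint path is `litgpt pretrain`'s default output directory and may differ in practice):

```bash
litgpt evaluate \
  --tasks 'hellaswag,gsm8k,truthfulqa_mc2,mmlu,winogrande,arc_challenge' \
  --batch_size 8 \
  out/pretrain/final/
```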
scripts/TRAIN.md CHANGED
@@ -38,3 +38,9 @@ pip install -U -r requirements-lit.in
  ```bash
  litgpt pretrain --config ./model.yaml
  ```
+
+ ## Evaluate
+
+ ```bash
+ litgpt evaluate --tasks 'hellaswag,gsm8k,truthfulqa_mc2,mmlu,winogrande,arc_challenge' --batch_size 8 out/pretrain/final/
+ ```
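
`litgpt evaluate` delegates to EleutherAI's lm-evaluation-harness under the hood. As a rough sketch, running the harness directly against a Hugging Face-format export of the checkpoint would look like the following; `path/to/hf_checkpoint` is a placeholder, not a path from this repo:

```bash
# Direct lm-evaluation-harness invocation (assumes the litgpt checkpoint
# has already been converted to a Hugging Face-compatible format).
lm_eval --model hf \
  --model_args pretrained=path/to/hf_checkpoint \
  --tasks hellaswag,gsm8k,truthfulqa_mc2,mmlu,winogrande,arc_challenge \
  --batch_size 8
```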