malteos and barthfab committed
Commit 01f81a6
1 Parent(s): f517ec0

clean tables (#1)


- clean tables (a110bcdbc4993d0419f6b84d96bfacde89c64317)


Co-authored-by: Fabio Barth <[email protected]>

Files changed (1):
  1. README.md +67 -46
README.md CHANGED
@@ -114,66 +114,87 @@ Currently, we are working on more suitable benchmarks for Spanish, French, German
  <details>
  <summary>Evaluation results</summary>
 
- ### English
-
- | | arc_challenge | belebele | hellaswag | mmlu | truthfulqa | avg |
- |:-------------------------------------|----------------:|-----------:|------------:|---------:|-------------:|---------:|
- | occiglot/occiglot-7b-eu5 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 | 0.59657 |
- | occiglot/occiglot-7b-eu5-instruct | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449034 | 0.617905 |
- | occiglot/occiglot-7b-de-en | 0.556314 | 0.791111 | 0.803824 | 0.568438 | 0.423251 | 0.628587 |
- | occiglot/occiglot-7b-de-en-instruct | 0.604096 | 0.812222 | 0.80004 | 0.570574 | 0.493807 | 0.656148 |
- | LeoLM/leo-mistral-hessianai-7b | 0.522184 | 0.736667 | 0.777833 | 0.538812 | 0.429248 | 0.600949 |
- | mistralai/Mistral-7B-v0.1 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 | 0.668385 |
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 | 0.713657 |
+ ### All 5 Languages
+
+ | | avg | arc_challenge | belebele | hellaswag | mmlu | truthfulqa |
+ |:---------------------------|---------:|----------------:|-----------:|------------:|---------:|-------------:|
+ | Occiglot-7b-eu5 | 0.516895 | 0.508109 | 0.675556 | 0.718963 | 0.402064 | 0.279782 |
+ | Occiglot-7b-eu5-instruct | 0.537799 | 0.53632 | 0.691111 | 0.731918 | 0.405198 | 0.32445 |
+ | Occiglot-7b-de-en | 0.518337 | 0.496297 | 0.715111 | 0.669034 | 0.412545 | 0.298697 |
+ | Occiglot-7b-de-en-instruct | 0.543173 | 0.530826 | 0.745778 | 0.67676 | 0.411326 | 0.351176 |
+ | Occiglot-7b-it-en | 0.513221 | 0.500564 | 0.694444 | 0.668099 | 0.413528 | 0.289469 |
+ | Occiglot-7b-it-en-instruct | 0.53721 | 0.523128 | 0.726667 | 0.683414 | 0.414918 | 0.337927 |
+ | Occiglot-7b-fr-en | 0.509209 | 0.496806 | 0.691333 | 0.667475 | 0.409129 | 0.281303 |
+ | Occiglot-7b-fr-en-instruct | 0.52884 | 0.515613 | 0.723333 | 0.67371 | 0.413024 | 0.318521 |
+ | Occiglot-7b-es-en | 0.483388 | 0.482949 | 0.606889 | 0.653902 | 0.398922 | 0.274277 |
+ | Occiglot-7b-es-en-instruct | 0.504023 | 0.494576 | 0.65 | 0.670847 | 0.406176 | 0.298513 |
+ | Leo-mistral-hessianai-7b | 0.484806 | 0.462103 | 0.653556 | 0.642242 | 0.379208 | 0.28692 |
+ | Claire-mistral-7b-0.1 | 0.514226 | 0.502773 | 0.705111 | 0.666871 | 0.412128 | 0.284245 |
+ | Lince-mistral-7b-it-es | 0.543427 | 0.540222 | 0.745111 | 0.692931 | 0.426241 | 0.312629 |
+ | Cerbero-7b | 0.532385 | 0.513714 | 0.743111 | 0.654061 | 0.427566 | 0.323475 |
+ | Mistral-7b-v0.1 | 0.547111 | 0.528937 | 0.768444 | 0.682516 | 0.448253 | 0.307403 |
+ | Mistral-7b-instruct-v0.2 | 0.56713 | 0.547228 | 0.741111 | 0.69455 | 0.422501 | 0.430262 |
+
+ ### English
+
+ | | avg | arc_challenge | belebele | hellaswag | mmlu | truthfulqa |
+ |:---------------------------|---------:|----------------:|-----------:|------------:|---------:|-------------:|
+ | Occiglot-7b-eu5 | 0.59657 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 |
+ | Occiglot-7b-eu5-instruct | 0.617905 | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449 |
+ | Leo-mistral-hessianai-7b | 0.600949 | 0.522184 | 0.736667 | 0.777833 | 0.538812 | 0.429248 |
+ | Mistral-7b-v0.1 | 0.668385 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 |
+ | Mistral-7b-instruct-v0.2 | 0.713657 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 |
 
  ### German
 
- | | arc_challenge_de | belebele_de | hellaswag_de | mmlu_de | truthfulqa_de | avg |
- |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
- | occiglot/occiglot-7b-eu5 | 0.493584 | 0.646667 | 0.666631 | 0.483406 | 0.251269 | 0.508311 |
- | occiglot/occiglot-7b-eu5-instruct | 0.529512 | 0.667778 | 0.685205 | 0.488234 | 0.286802 | 0.531506 |
- | occiglot/occiglot-7b-de-en | 0.50556 | 0.743333 | 0.67421 | 0.514633 | 0.26269 | 0.540085 |
- | occiglot/occiglot-7b-de-en-instruct | 0.54491 | 0.772222 | 0.688407 | 0.515915 | 0.310914 | 0.566474 |
- | LeoLM/leo-mistral-hessianai-7b | 0.474765 | 0.691111 | 0.682109 | 0.488309 | 0.252538 | 0.517766 |
- | mistralai/Mistral-7B-v0.1 | 0.476476 | 0.738889 | 0.610589 | 0.529567 | 0.284264 | 0.527957 |
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.485885 | 0.688889 | 0.622438 | 0.501961 | 0.376904 | 0.535215 |
+ | | avg | arc_challenge_de | belebele_de | hellaswag_de | mmlu_de | truthfulqa_de |
+ |:---------------------------|---------:|-------------------:|--------------:|---------------:|----------:|----------------:|
+ | Occiglot-7b-eu5 | 0.508311 | 0.493584 | 0.646667 | 0.666631 | 0.483406 | 0.251269 |
+ | Occiglot-7b-eu5-instruct | 0.531506 | 0.529512 | 0.667778 | 0.685205 | 0.488234 | 0.286802 |
+ | Occiglot-7b-de-en | 0.540085 | 0.50556 | 0.743333 | 0.67421 | 0.514633 | 0.26269 |
+ | Occiglot-7b-de-en-instruct | 0.566474 | 0.54491 | 0.772222 | 0.688407 | 0.515915 | 0.310914 |
+ | Leo-mistral-hessianai-7b | 0.517766 | 0.474765 | 0.691111 | 0.682109 | 0.488309 | 0.252538 |
+ | Mistral-7b-v0.1 | 0.527957 | 0.476476 | 0.738889 | 0.610589 | 0.529567 | 0.284264 |
+ | Mistral-7b-instruct-v0.2 | 0.535215 | 0.485885 | 0.688889 | 0.622438 | 0.501961 | 0.376904 |
 
  ### Spanish
 
- | | arc_challenge_es | belebele_es | hellaswag_es | mmlu_es | truthfulqa_es | avg |
- |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
- | occiglot/occiglot-7b-eu5 | 0.508547 | 0.676667 | 0.725411 | 0.499325 | 0.25602 | 0.533194 |
- | occiglot/occiglot-7b-eu5-instruct | 0.535043 | 0.68 | 0.737039 | 0.503525 | 0.285171 | 0.548155 |
- | occiglot/occiglot-7b-es-en | 0.529915 | 0.627778 | 0.72253 | 0.512749 | 0.243346 | 0.527264 |
- | occiglot/occiglot-7b-es-en-instruct | 0.545299 | 0.636667 | 0.734372 | 0.524374 | 0.257288 | 0.5396 |
- | clibrain/lince-mistral-7b-it-es | 0.52906 | 0.721111 | 0.687967 | 0.512749 | 0.285171 | 0.547212 |
- | mistralai/Mistral-7B-v0.1 | 0.528205 | 0.747778 | 0.672712 | 0.544023 | 0.281369 | 0.554817 |
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.54188 | 0.73 | 0.685406 | 0.511699 | 0.373891 | 0.568575 |
+ | | avg | arc_challenge_es | belebele_es | hellaswag_es | mmlu_es | truthfulqa_es |
+ |:---------------------------|---------:|-------------------:|--------------:|---------------:|----------:|----------------:|
+ | Occiglot-7b-eu5 | 0.533194 | 0.508547 | 0.676667 | 0.725411 | 0.499325 | 0.25602 |
+ | Occiglot-7b-eu5-instruct | 0.548155 | 0.535043 | 0.68 | 0.737039 | 0.503525 | 0.285171 |
+ | Occiglot-7b-es-en | 0.527264 | 0.529915 | 0.627778 | 0.72253 | 0.512749 | 0.243346 |
+ | Occiglot-7b-es-en-instruct | 0.5396 | 0.545299 | 0.636667 | 0.734372 | 0.524374 | 0.257288 |
+ | Lince-mistral-7b-it-es | 0.547212 | 0.52906 | 0.721111 | 0.687967 | 0.512749 | 0.285171 |
+ | Mistral-7b-v0.1 | 0.554817 | 0.528205 | 0.747778 | 0.672712 | 0.544023 | 0.281369 |
+ | Mistral-7b-instruct-v0.2 | 0.568575 | 0.54188 | 0.73 | 0.685406 | 0.511699 | 0.373891 |
 
  ### French
 
- | | arc_challenge_fr | belebele_fr | hellaswag_fr | mmlu_fr | truthfulqa_fr | avg |
- |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
- | occiglot/occiglot-7b-eu5 | 0.506416 | 0.675556 | 0.712358 | 0.495684 | 0.23507 | 0.525017 |
- | occiglot/occiglot-7b-eu5-instruct | 0.541488 | 0.7 | 0.724245 | 0.499122 | 0.306226 | 0.554216 |
- | occiglot/occiglot-7b-fr-en | 0.532934 | 0.706667 | 0.718891 | 0.51333 | 0.242694 | 0.542903 |
- | occiglot/occiglot-7b-fr-en-instruct | 0.542344 | 0.752222 | 0.72553 | 0.52051 | 0.29479 | 0.567079 |
- | OpenLLM-France/Claire-Mistral-7B-0.1 | 0.486741 | 0.694444 | 0.642964 | 0.479566 | 0.271919 | 0.515127 |
- | mistralai/Mistral-7B-v0.1 | 0.525235 | 0.776667 | 0.66481 | 0.543121 | 0.280813 | 0.558129 |
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.551754 | 0.758889 | 0.67916 | 0.506837 | 0.382465 | 0.575821 |
+ | | avg | arc_challenge_fr | belebele_fr | hellaswag_fr | mmlu_fr | truthfulqa_fr |
+ |:---------------------------|---------:|-------------------:|--------------:|---------------:|----------:|----------------:|
+ | Occiglot-7b-eu5 | 0.525017 | 0.506416 | 0.675556 | 0.712358 | 0.495684 | 0.23507 |
+ | Occiglot-7b-eu5-instruct | 0.554216 | 0.541488 | 0.7 | 0.724245 | 0.499122 | 0.306226 |
+ | Occiglot-7b-fr-en | 0.542903 | 0.532934 | 0.706667 | 0.718891 | 0.51333 | 0.242694 |
+ | Occiglot-7b-fr-en-instruct | 0.567079 | 0.542344 | 0.752222 | 0.72553 | 0.52051 | 0.29479 |
+ | Claire-mistral-7b-0.1 | 0.515127 | 0.486741 | 0.694444 | 0.642964 | 0.479566 | 0.271919 |
+ | Cerbero-7b | 0.526044 | 0.462789 | 0.735556 | 0.624438 | 0.516462 | 0.290978 |
+ | Mistral-7b-v0.1 | 0.558129 | 0.525235 | 0.776667 | 0.66481 | 0.543121 | 0.280813 |
+ | Mistral-7b-instruct-v0.2 | 0.575821 | 0.551754 | 0.758889 | 0.67916 | 0.506837 | 0.382465 |
 
  ### Italian
 
- | | arc_challenge_it | belebele_it | hellaswag_it | mmlu_it | truthfulqa_it | avg |
- |:-------------------------------------|-------------------:|--------------:|---------------:|----------:|----------------:|---------:|
- | occiglot/occiglot-7b-eu5 | 0.501283 | 0.652222 | 0.700533 | 0 | 0.252874 | 0.421382 |
- | occiglot/occiglot-7b-eu5-instruct | 0.516681 | 0.661111 | 0.71326 | 0 | 0.295019 | 0.437214 |
- | occiglot/occiglot-7b-it-en | 0.536356 | 0.684444 | 0.694768 | 0 | 0.247765 | 0.432667 |
- | occiglot/occiglot-7b-it-en-instruct | 0.545766 | 0.717778 | 0.713804 | 0 | 0.303959 | 0.456261 |
- | galatolo/cerbero-7b | 0.522669 | 0.717778 | 0.631567 | 0 | 0.302682 | 0.434939 |
- | mistralai/Mistral-7B-v0.1 | 0.502139 | 0.734444 | 0.630371 | 0 | 0.264368 | 0.426264 |
- | mistralai/Mistral-7B-Instruct-v0.2 | 0.519247 | 0.703333 | 0.6394 | 0 | 0.349936 | 0.442383 |
+ | | avg | arc_challenge_it | belebele_it | hellaswag_it | mmlu_it | truthfulqa_it |
+ |:---------------------------|---------:|-------------------:|--------------:|---------------:|----------:|----------------:|
+ | Occiglot-7b-eu5 | 0.421382 | 0.501283 | 0.652222 | 0.700533 | 0 | 0.252874 |
+ | Occiglot-7b-eu5-instruct | 0.437214 | 0.516681 | 0.661111 | 0.71326 | 0 | 0.295019 |
+ | Occiglot-7b-it-en | 0.432667 | 0.536356 | 0.684444 | 0.694768 | 0 | 0.247765 |
+ | Occiglot-7b-it-en-instruct | 0.456261 | 0.545766 | 0.717778 | 0.713804 | 0 | 0.303959 |
+ | Cerbero-7b | 0.434939 | 0.522669 | 0.717778 | 0.631567 | 0 | 0.302682 |
+ | Mistral-7b-v0.1 | 0.426264 | 0.502139 | 0.734444 | 0.630371 | 0 | 0.264368 |
+ | Mistral-7b-instruct-v0.2 | 0.442383 | 0.519247 | 0.703333 | 0.6394 | 0 | 0.349936 |
+
 
199
  </details>
200