meg-huggingface committed on
Commit f20cab2
1 Parent(s): 64c3915

Clearing cached results

eval-results-bk/demo-leaderboard/gpt2-demo/results_2023-11-21T18-10-08.json ADDED
@@ -0,0 +1,19 @@
+{
+    "config": {
+        "model_dtype": "torch.float16",
+        "model_name": "demo-leaderboard/gpt2-demo",
+        "model_sha": "ac3299b02780836378b9e1e68c6eead546e89f90"
+    },
+    "results": {
+        "realtoxicityprompts": {
+            "perspective_api_toxicity_score": 0
+        },
+        "toxigen": { "acc_norm": 0 },
+        "anli_r1": {
+            "acc": 0
+        },
+        "logiqa": {
+            "acc_norm": 0.90
+        }
+    }
+}
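For context, a minimal sketch of how a results file in this shape could be flattened into per-metric leaderboard rows. The `load_results` helper is hypothetical (not part of this commit) and assumes only the `config`/`results` layout visible in the hunk above:

```python
import json
from pathlib import Path


def load_results(path: str) -> list[dict]:
    """Flatten one results JSON into per-metric rows for a leaderboard table."""
    data = json.loads(Path(path).read_text())
    model = data["config"]["model_name"]
    rows = []
    # "results" maps task name -> {metric name -> score}.
    for task, metrics in data["results"].items():
        for metric, value in metrics.items():
            rows.append({"model": model, "task": task, "metric": metric, "value": value})
    return rows


for row in load_results(
    "eval-results-bk/demo-leaderboard/gpt2-demo/results_2023-11-21T18-10-08.json"
):
    print(row)
```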
eval-results-bk/demo-leaderboard/gpt2-demo/results_2023-11-22 15:46:20.425378.json ADDED
@@ -0,0 +1,37 @@
+{
+    "results": {
+        "realtoxicityprompts": {
+            "perspective_api_toxicity_score": 0
+        },
+        "toxigen": { "acc_norm": 0 },
+        "anli_r1": {
+            "acc": 0.4,
+            "acc_stderr": 0.11239029738980327
+        },
+        "logiqa": {
+            "acc": 0.35,
+            "acc_stderr": 0.10942433098048308,
+            "acc_norm": 0.3,
+            "acc_norm_stderr": 0.10513149660756933
+        }
+    },
+    "versions": {
+        "anli_r1": 0,
+        "logiqa": 0
+    },
+    "config": {
+        "model": "hf-causal-experimental",
+        "model_args": "pretrained=demo-leaderboard/gpt2-demo,revision=main,dtype=bfloat16",
+        "num_fewshot": 0,
+        "batch_size": 1,
+        "batch_sizes": [],
+        "device": "cpu",
+        "no_cache": true,
+        "limit": 20,
+        "bootstrap_iters": 100000,
+        "description_dict": null,
+        "model_dtype": "bfloat16",
+        "model_name": "demo-leaderboard/gpt2-demo",
+        "model_sha": "main"
+    }
+}
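This second file also records the run config in the lm-evaluation-harness style, where `model_args` packs keyword arguments into one comma-separated string. A small illustrative sketch of splitting it back into a dict; `parse_model_args` is a hypothetical helper and assumes values contain no commas:

```python
def parse_model_args(model_args: str) -> dict[str, str]:
    """Split a 'k1=v1,k2=v2' string into a dict.

    Assumes no commas inside values and at most one meaningful '=' per pair.
    """
    return dict(pair.split("=", 1) for pair in model_args.split(","))


print(parse_model_args(
    "pretrained=demo-leaderboard/gpt2-demo,revision=main,dtype=bfloat16"
))
# -> {'pretrained': 'demo-leaderboard/gpt2-demo', 'revision': 'main', 'dtype': 'bfloat16'}
```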