{ "results": { "arc_de_challenge": { "acc": 0.2348993288590604, "acc_stderr": 0.024599255015999244, "acc_norm": 0.28187919463087246, "acc_norm_stderr": 0.026106703750007426 } }, "versions": { "arc_de_challenge": 0 }, "config": { "model": "hf-auto", "model_args": "pretrained=bigscience/bloom-560m", "batch_size": "1", "device": "cuda", "no_cache": false, "limit": null, "bootstrap_iters": 100000, "description_dict": {} } }