{
  "epoch": 3.0,
  "eval_accuracy": 0.743125,
  "eval_accuracy_label_arts, culture, entertainment and media": 0.6842105263157895,
  "eval_accuracy_label_conflict, war and peace": 0.7350689127105666,
  "eval_accuracy_label_crime, law and justice": 0.8917525773195877,
  "eval_accuracy_label_disaster, accident, and emergency incident": 0.8698630136986302,
  "eval_accuracy_label_economy, business, and finance": 0.6893203883495146,
  "eval_accuracy_label_environment": 0.4482758620689655,
  "eval_accuracy_label_health": 0.7222222222222222,
  "eval_accuracy_label_human interest": 0.3181818181818182,
  "eval_accuracy_label_labour": 0.5,
  "eval_accuracy_label_lifestyle and leisure": 0.5555555555555556,
  "eval_accuracy_label_politics": 0.7909407665505227,
  "eval_accuracy_label_religion": 0.0,
  "eval_accuracy_label_science and technology": 0.4583333333333333,
  "eval_accuracy_label_society": 0.35384615384615387,
  "eval_accuracy_label_sport": 0.9615384615384616,
  "eval_accuracy_label_weather": 0.0,
  "eval_f1": 0.747448709907019,
  "eval_loss": 0.8030127882957458,
  "eval_precision": 0.7695117491320402,
  "eval_recall": 0.743125,
  "eval_runtime": 12.5023,
  "eval_samples_per_second": 127.976,
  "eval_steps_per_second": 7.999,
  "step": 2064
}