ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199917,
"file_path": "results/Huggy/Huggy/Huggy-199917.onnx",
"reward": 3.504016226337802,
"creation_time": 1700427730.3522549,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199917.pt"
]
},
{
"steps": 399926,
"file_path": "results/Huggy/Huggy/Huggy-399926.onnx",
"reward": 3.8125232512300666,
"creation_time": 1700427977.2006705,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399926.pt"
]
},
{
"steps": 599886,
"file_path": "results/Huggy/Huggy/Huggy-599886.onnx",
"reward": 3.695196953686801,
"creation_time": 1700428222.4267807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599886.pt"
]
},
{
"steps": 799957,
"file_path": "results/Huggy/Huggy/Huggy-799957.onnx",
"reward": 4.048090691262103,
"creation_time": 1700428461.0012884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799957.pt"
]
},
{
"steps": 999914,
"file_path": "results/Huggy/Huggy/Huggy-999914.onnx",
"reward": 4.338625831197398,
"creation_time": 1700428704.7204366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999914.pt"
]
},
{
"steps": 1199971,
"file_path": "results/Huggy/Huggy/Huggy-1199971.onnx",
"reward": 3.544655633978097,
"creation_time": 1700428948.0874612,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199971.pt"
]
},
{
"steps": 1399955,
"file_path": "results/Huggy/Huggy/Huggy-1399955.onnx",
"reward": 2.726806322733561,
"creation_time": 1700429192.8649738,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399955.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy/Huggy/Huggy-1599964.onnx",
"reward": 3.9989679969492413,
"creation_time": 1700429443.1171112,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799887,
"file_path": "results/Huggy/Huggy/Huggy-1799887.onnx",
"reward": 3.876592622581103,
"creation_time": 1700429704.375589,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799887.pt"
]
},
{
"steps": 1999928,
"file_path": "results/Huggy/Huggy/Huggy-1999928.onnx",
"reward": 3.664196695830371,
"creation_time": 1700429956.3121612,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999928.pt"
]
},
{
"steps": 2000036,
"file_path": "results/Huggy/Huggy/Huggy-2000036.onnx",
"reward": 3.676101950009664,
"creation_time": 1700429956.4178786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
],
"final_checkpoint": {
"steps": 2000036,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.676101950009664,
"creation_time": 1700429956.4178786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}
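The log above follows the ML-Agents `training_status.json` layout: a per-behavior block (`Huggy`) listing each saved checkpoint with its step count, exported `.onnx` path, mean reward, creation timestamp, and auxiliary `.pt` file, plus a `final_checkpoint` entry that mirrors the last checkpoint (steps 2000036, same reward and creation time) but points at the consolidated export `results/Huggy/Huggy.onnx`. A minimal Python sketch for inspecting such a log (the file path below is an assumption for illustration, not taken from this repo; adjust it to your own run directory):

```python
import json

# Load the ML-Agents training status log. The path here is an assumption
# for illustration; point it at your own run_logs directory.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print step count, mean reward, and export path for every saved checkpoint.
for ckpt in huggy["checkpoints"]:
    print(f"step {ckpt['steps']:>8}: reward {ckpt['reward']:.3f} -> {ckpt['file_path']}")

# The highest-reward checkpoint is not necessarily the final one.
best = max(huggy["checkpoints"], key=lambda c: c["reward"])
print(f"best:  step {best['steps']} (reward {best['reward']:.3f})")

final = huggy["final_checkpoint"]
print(f"final: step {final['steps']} (reward {final['reward']:.3f}) -> {final['file_path']}")
```

In this particular run the highest recorded reward belongs to the 999914-step checkpoint (about 4.34), not the final export (about 3.68), so keeping the intermediate `.onnx` checkpoints around can be worthwhile when selecting a model to publish.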