{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199849,
        "file_path": "results/Huggy/Huggy/Huggy-199849.onnx",
        "reward": 3.3960285733143487,
        "creation_time": 1701079059.129975,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199849.pt"
        ]
      },
      {
        "steps": 399946,
        "file_path": "results/Huggy/Huggy/Huggy-399946.onnx",
        "reward": 4.024484683360372,
        "creation_time": 1701079312.1799722,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399946.pt"
        ]
      },
      {
        "steps": 599876,
        "file_path": "results/Huggy/Huggy/Huggy-599876.onnx",
        "reward": 2.9312647208571434,
        "creation_time": 1701079566.5841384,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599876.pt"
        ]
      },
      {
        "steps": 799660,
        "file_path": "results/Huggy/Huggy/Huggy-799660.onnx",
        "reward": 3.5668381304605634,
        "creation_time": 1701079808.1285608,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799660.pt"
        ]
      },
      {
        "steps": 999986,
        "file_path": "results/Huggy/Huggy/Huggy-999986.onnx",
        "reward": 3.5666815819947617,
        "creation_time": 1701080055.330834,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999986.pt"
        ]
      },
      {
        "steps": 1199795,
        "file_path": "results/Huggy/Huggy/Huggy-1199795.onnx",
        "reward": 3.669899903403388,
        "creation_time": 1701080299.8067372,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199795.pt"
        ]
      },
      {
        "steps": 1399959,
        "file_path": "results/Huggy/Huggy/Huggy-1399959.onnx",
        "reward": 3.7059742682006047,
        "creation_time": 1701080542.3811812,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399959.pt"
        ]
      },
      {
        "steps": 1599879,
        "file_path": "results/Huggy/Huggy/Huggy-1599879.onnx",
        "reward": 3.6535215459086676,
        "creation_time": 1701080788.4104347,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599879.pt"
        ]
      },
      {
        "steps": 1799960,
        "file_path": "results/Huggy/Huggy/Huggy-1799960.onnx",
        "reward": 3.6103197065266697,
        "creation_time": 1701081035.1639132,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799960.pt"
        ]
      },
      {
        "steps": 1999924,
        "file_path": "results/Huggy/Huggy/Huggy-1999924.onnx",
        "reward": 5.177924156188965,
        "creation_time": 1701081281.4185772,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999924.pt"
        ]
      },
      {
        "steps": 2000022,
        "file_path": "results/Huggy/Huggy/Huggy-2000022.onnx",
        "reward": 5.254583438237508,
        "creation_time": 1701081281.5277088,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000022.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000022,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 5.254583438237508,
      "creation_time": 1701081281.5277088,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000022.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.1+cu121"
  }
}