{
"Huggy": {
"checkpoints": [
{
"steps": 199938,
"file_path": "results/Huggy/Huggy/Huggy-199938.onnx",
"reward": 3.568377533325782,
"creation_time": 1685281838.8078651,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199938.pt"
]
},
{
"steps": 399921,
"file_path": "results/Huggy/Huggy/Huggy-399921.onnx",
"reward": 3.6839251825886388,
"creation_time": 1685282069.816572,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399921.pt"
]
},
{
"steps": 599904,
"file_path": "results/Huggy/Huggy/Huggy-599904.onnx",
"reward": 4.674420058727264,
"creation_time": 1685282311.7374964,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599904.pt"
]
},
{
"steps": 799981,
"file_path": "results/Huggy/Huggy/Huggy-799981.onnx",
"reward": 3.8228071217620094,
"creation_time": 1685282550.8761492,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799981.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
"reward": 4.016938904459162,
"creation_time": 1685282804.8820753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy/Huggy/Huggy-1199976.onnx",
"reward": 4.172088454663753,
"creation_time": 1685283056.8523548,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399906,
"file_path": "results/Huggy/Huggy/Huggy-1399906.onnx",
"reward": 3.8741072714328766,
"creation_time": 1685283311.2219348,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399906.pt"
]
},
{
"steps": 1599956,
"file_path": "results/Huggy/Huggy/Huggy-1599956.onnx",
"reward": 3.9106598913061377,
"creation_time": 1685283561.640745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599956.pt"
]
},
{
"steps": 1799419,
"file_path": "results/Huggy/Huggy/Huggy-1799419.onnx",
"reward": 3.7882886996140352,
"creation_time": 1685283809.0639124,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799419.pt"
]
},
{
"steps": 1999982,
"file_path": "results/Huggy/Huggy/Huggy-1999982.onnx",
"reward": 3.796310931444168,
"creation_time": 1685284061.4240048,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999982.pt"
]
},
{
"steps": 2000080,
"file_path": "results/Huggy/Huggy/Huggy-2000080.onnx",
"reward": 3.826264590587256,
"creation_time": 1685284061.5560782,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
],
"final_checkpoint": {
"steps": 2000080,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.826264590587256,
"creation_time": 1685284061.5560782,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}