Upload my updated PPO model to the hub

Files changed:
- README.md (+1 -1)
- config.json (+1 -1)
- ppo-LunarLander-v2.zip (+2 -2)
- ppo-LunarLander-v2/data (+24 -24)
- ppo-LunarLander-v2/policy.optimizer.pth (+1 -1)
- ppo-LunarLander-v2/policy.pth (+1 -1)
- results.json (+1 -1)
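A commit like this is typically produced by `package_to_hub` from `huggingface_sb3`, which evaluates the model, regenerates the model card metadata and results.json, and pushes the updated files in one go. The sketch below is illustrative only: `model` is assumed to be the retrained PPO instance, and the `repo_id` is a placeholder, since the target repository name is not shown in this commit.

```python
# Sketch only: how an "upload my updated PPO model" commit is typically produced.
# Assumes `model` is the retrained stable-baselines3 PPO object and that you are
# already logged in (e.g. via `huggingface-cli login`).
import gymnasium as gym
from stable_baselines3.common.monitor import Monitor
from stable_baselines3.common.vec_env import DummyVecEnv
from huggingface_sb3 import package_to_hub

env_id = "LunarLander-v2"
eval_env = DummyVecEnv([lambda: Monitor(gym.make(env_id, render_mode="rgb_array"))])

package_to_hub(
    model=model,                                 # retrained PPO model
    model_name="ppo-LunarLander-v2",             # matches the uploaded file names
    model_architecture="PPO",
    env_id=env_id,
    eval_env=eval_env,
    repo_id="<username>/ppo-LunarLander-v2",     # placeholder repo id
    commit_message="Upload my updated PPO model to the hub",
)
```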
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value:
+      value: 167.10 +/- 122.86
       name: mean_reward
       verified: false
 ---
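The `value` string in the model-card metadata is simply the evaluation result from results.json formatted as `mean +/- std`, rounded to two decimals:

```python
# Numbers taken from results.json in this commit.
mean_reward = 167.10165951555012
std_reward = 122.86334723849355

print(f"{mean_reward:.2f} +/- {std_reward:.2f}")  # -> "167.10 +/- 122.86"
```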
config.json CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x00000235C22A8E50>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x00000235C22A8EE0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x00000235C22A8F70>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x00000235C22AC040>", "_build": "<function ActorCriticPolicy._build at 0x00000235C22AC0D0>", "forward": "<function ActorCriticPolicy.forward at 0x00000235C22AC160>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x00000235C22AC1F0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x00000235C22AC280>", "_predict": "<function ActorCriticPolicy._predict at 0x00000235C22AC310>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x00000235C22AC3A0>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x00000235C22AC430>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x00000235C22AC4C0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x00000235C22A3D50>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1000448, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1695707650038113200, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVlQAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYgAAAAAAAAAJpVuT3aEoI/y65nuyIIdr7Eh5M9aPeNvQAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksBSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.00044800000000000395, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVQQwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQGlR1R1oxpOMAWyUTZoBjAF0lEdAmunvI4lyBHV9lChoBkdAbG0Jiy6cy2gHTa8BaAhHQJrtpF6Rhc91fZQoaAZHQGxYXSa3I+5oB02QAWgIR0Ca7xMA3kxRdX2UKGgGR0BvRXYzzmOmaAdNswFoCEdAmvKdqtYCAHV9lChoBkdAcBUFdLQHA2gHTaABaAhHQJrz0r6LwWp1fZQoaAZHQG3jK4YrJ8xoB010AWgIR0Ca9YJIUahpdX2UKGgGR0Bs9O+ZgG8maAdNpwFoCEdAmvkPR/mT1XV9lChoBkdAcAGv3JxNqWgHTYQBaAhHQJr62/Efkmx1fZQoaAZHQG6lDAJswcpoB02FAWgIR0Ca/mZg5R0mdX2UKGgGR0Bw0EV8CxNZaAdNZgFoCEdAmv+nuRcNY3V9lChoBkdAb7Zjm0VrRGgHTX8BaAhHQJsBYdsBQvZ1fZQoaAZHQEgKIPbwjMVoB0v9aAhHQJsEIUfxMFl1fZQoaAZHQG/Kn9m6GxloB02fAWgIR0CbBWMYdhiLdX2UKGgGR0BR1m1YyO7yaAdN6ANoCEdAmwrOkk8ifXV9lChoBkdAcIT3VkMCtGgHTWsBaAhHQJsL/NPgvUV1fZQoaAZHQHBxr/jsD4hoB01yAWgIR0CbDxyPdVNpdX2UKGgGR0Bs1IfEGZ/kaAdN4wFoCEdAmxGtyDIzWXV9lChoBkdAa51r1M/QjWgHTawBaAhHQJsU+ZLIxQB1fZQoaAZHQHHMI593KSxoB02NAWgIR0CbFjVWjoIOdX2UKGgGR0BtpQOc2BJ7aAdN8AFoCEdAmxgZEx7AtXV9lChoBkdAbwy0tyxRmGgHTaoBaAhHQJsbQNlRP451fZQoaAZHQGxP+SSvC/JoB014AWgIR0CbHDwtrbg1dX2UKGgGR0Bxj+b4Ju2raAdNnAFoCEdAmx91VDKHPHV9lChoBkdAcILRUFSsKmgHTc4BaAhHQJsgz71qWTp1fZQoaAZHQGtBB6rvLHNoB02jAWgIR0CbJHmCAc1gdX2UKGgGR0Bxx5zBAOawaAdNsAFoCEdAmyY0CzTnaHV9lChoBkdAbe5EjPfKp2gHTaEBaAhHQJsnuGqPwNN1fZQoaAZHQHBKdAkcCHRoB011AWgIR0CbKpqxkd3jdX2UKGgGR0BwATR6Ww/xaAdNAAJoCEdAmywDy8SPEXV9lChoBkdAcDYGW2PT5WgHTWUBaAhHQJsu/oOhCdB1fZQoaAZHQG0UcMmWt2doB01pAmgIR0CbMMkP+XJHdX2UKGgGR0BuXu1OTJQtaAdNuwFoCEdAmzQ0F0PpZHV9lChoBkdAbqI/EfkmyGgHTcIBaAhHQJs1i/QBxPx1fZQoaAZHQGqVWD6Fds1oB00UAmgIR0CbOWEqDsdDdX2UKGgGR0Btiditq59WaAdNwQFoCEdAmzrm+0w8GXV9lChoBkdAbhzpFCswL2gHTX4BaAhHQJs8R80DU3J1fZQoaAZHQG+9fk3juKJoB03HAWgIR0CbP3dvKlpHdX2UKGgGR0BwkFkUbkwOaAdNawFoCEdAm0BssUZeiXV9lChoBkdAYe3MZgogFGgHTegDaAhHQJtFYcCHRCx1fZQoaAZHQHI4o7ihnJ1oB01lAWgIR0CbSNJYkmhNdX2UKGgGR0BwKPriVB2PaAdNvAFoCEdAm0oYkVvddnV9lChoBkdAbCQjM3ZPEmgHTXoBaAhHQJtLZw6ySmt1fZQoaAZHQHFxvnjhky1oB01KAWgIR0CbTr29tdiVdX2UKGgGR0Bsr7amGdqdaAdNgQFoCEdAm0/gKF7D23V9lChoBkdAaXe6STyJ9GgHTYoBaAhHQJtRU2hqTKV1fZQoaAZHQGBemDcuandoB03oA2gIR0CbVmBnjABUdX2UKGgGR0Bvh7987ZFoaAdNvQFoCEdAm1mz+NtIkXV9lChoBkdAbgn+x4Y772gHTXABaAhHQJta0atLcsV1fZQoaAZHQG2cwzch1T1oB019AWgIR0CbXeaNuLrHdX2UKGgGR0BsxdGViWmhaAdNfwFoCEdAm19X27FsHnV9lChoBkdAcOfaIN3GGWgHTY0BaAhHQJtg1Fd9lVd1fZQoaAZHQGv5KxC6YmdoB011AWgIR0CbY95jYqXodX2UKGgGR0BwSlHAh0QsaAdNnQFoCEdAm2UPf4yoGnV9lChoBkdAbmob+cYqG2gHTbECaAhHQJtpTshPj4p1fZQoaAZHQGyk3Kji4rloB02SAWgIR0CbanfigkC4dX2UKGgGR0Bv7raK1og3aAdNfQFoCEdAm22Rhc7henV9lChoBkdAb4VJe3QUpWgHTWEBaAhHQJtumbe/Ho51fZQoaAZHQDFVIYm9g4RoB0vQaAhHQJtvPY7JW/91fZQoaAZHQHDGm1MM7U5oB019AWgIR0CbcQU70WdmdX2UKGgGR0Bv/Sj8DSw4aAdNZgFoCEdAm3QOfRNRFnV9lChoBkdAa4lXHR1HOWgHTZMBaAhHQJt1OrlvIfd1fZQoaAZHQG3nLCFbmltoB01sAWgIR0CbeG/3FkxzdX2UKGgGR0BbgcWO6unuaAdN6ANoCEdAm32BVU+9rXV9lChoBkdAb2+E384xUWgHTZcBaAhHQJt+uURnOB11fZQoaAZHQHGc6BiCrcVoB01tAWgIR0Cbf84HX2/SdX2UKGgGR0BuutVxS5y3aAdNigFoCEdAm4MrDVH4GnV9lChoBkdAbi7rdFfAsWgHTXMBaAhHQJuEJNg0CRx
1fZQoaAZHQHIzawyIpH9oB01AAWgIR0CbhQ8ZUDMedX2UKGgGR0AuWwbEP1+RaAdNIwFoCEdAm4gkP1+RYHV9lChoBkdAcHUOEdvKl2gHTWkBaAhHQJuJPp5eJHl1fZQoaAZHQGy2EiMYMv1oB02MAWgIR0CbityPdVNpdX2UKGgGR0BwYMvPC2tuaAdNiQFoCEdAm44CjxkNF3V9lChoBkdAcFPamoBJZmgHTXoBaAhHQJuPFHH3lCF1fZQoaAZHQHEtqmoBJZpoB02MAWgIR0CbkLdnCfpVdX2UKGgGR0BxA0zXSSeRaAdNwwFoCEdAm5ZCbx3FDXV9lChoBkdAbb6XzDn/1mgHTVUBaAhHQJuXW5Zr57B1fZQoaAZHQE7dX3g1m8NoB00eAWgIR0Cbmhkhib2EdX2UKGgGR0BtpKGL1mJ4aAdNrgFoCEdAm5tNGNJe3XV9lChoBkdAbu/IOH31z2gHTXMBaAhHQJucp9kSVW11fZQoaAZHQHBinlfZ26loB02NAWgIR0Cbn/e+VTrFdX2UKGgGR0BwOonpjc2zaAdNjAFoCEdAm6FBGUfPonV9lChoBkdAYVtNt65Xl2gHTegDaAhHQJumPdBSk0t1fZQoaAZHQHAEJSNwR5FoB03EAWgIR0Cbqgosqaw2dX2UKGgGR0BAWnOB19v1aAdLzWgIR0Cbqp5OafBfdX2UKGgGR0BvLIfKZDzAaAdNYwFoCEdAm6vDhxYJV3V9lChoBkdAbNq9RJmNBGgHTWgBaAhHQJutEy8BdUt1fZQoaAZHQGrOVI7Njb1oB019AWgIR0Cbr+2kzoECdX2UKGgGR0BtMWkSElE7aAdNdAFoCEdAm7Dp0jkdWHV9lChoBkdAa/Idc0Ltu2gHTbIBaAhHQJu0AAU+LWJ1fZQoaAZHQHDqR0p3HJdoB01zAWgIR0CbtPrupjtpdX2UKGgGR0Bkv+S6lLvkaAdN6ANoCEdAm7n/qkdmx3V9lChoBkdAcTs4CIUJwGgHTeIBaAhHQJu7mT7l7t11fZQoaAZHQGzKvJJXhfloB00RAmgIR0Cbvx5ylvZRdX2UKGgGR0Bsge938n/laAdNQQFoCEdAm8AZQcghbHV9lChoBkc/6QSUTtb9qGgHTXUBaAhHQJvDLGCI1tR1fZQoaAZHQHDCYfCAMDxoB03MAWgIR0CbxHgmqo60dX2UKGgGR0Bts/sNUfgaaAdNbgFoCEdAm8XD9wWFe3V9lChoBkdAcZ7EOAiFCmgHTXABaAhHQJvI3NKRMex1fZQoaAZHQDMkOAiFCcBoB000AWgIR0CbydZ7HAARdX2UKGgGR0BvR/GdZq20aAdNlQFoCEdAm8tk1l5GBnVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 3908, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV1QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCloCmgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 1, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "system_info": {"OS": "Windows-10-10.0.19041-SP0 10.0.19041", "Python": "3.8.0", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.0.1+cpu", "GPU Enabled": "False", "Numpy": "1.24.4", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x0000023F6EB680D0>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x0000023F6EB68160>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x0000023F6EB681F0>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x0000023F6EB68280>", "_build": "<function ActorCriticPolicy._build at 0x0000023F6EB68310>", "forward": "<function ActorCriticPolicy.forward at 0x0000023F6EB683A0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x0000023F6EB68430>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x0000023F6EB684C0>", "_predict": "<function ActorCriticPolicy._predict at 0x0000023F6EB68550>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x0000023F6EB685E0>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x0000023F6EB68670>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x0000023F6EB68700>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x0000023F6EB66210>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1120000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1695762737737517200, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAPPGxj7IIpy8y46DPTksSz1FfIO9qg57PQAAAAAAAAAAqja1PimOGDuO4NW78KiovhZhiDxYUe87AAAAAAAAAABAjNO9vaiFP8H0K74JddW+8QYEvvJFhL0AAAAAAAAAAD2v5z6oo8c+Ap0mvKYeq750Gxs9KDzgvQAAAAAAAAAAusWUPqnAMLw9/7M8VnE7uphvk72nrQK7AACAPwAAgD/NKng8c+eyP74bRD+CJJ2+3SuOvN7bL74AAAAAAAAAACZWDj4MsI4/AzPmPbIO5b7RGSs+9QqYOwAAAAAAAAAAMxTQPXskl7oRsIw7EACMPcF+u7ve+yO8AACAPwAAgD/KIvE+KtatvZhD+Dxf7U67yA1XPv3y4TwAAIA/AACAP+105b7qZLa9ZuylPevGqzxhcGS+NJAQPgAAgD8AAIA/2hzePbiW97mGK3o8RUY4vNtiprqOAyK9AACAPwAAgD8zaqm8e7T+OZceID3VjU48qbMivPj0Ab0AAIA/AAAAAFpFxb1c6xe6S1tovGe2Tjusr1A7gw81PAAAgD8AAIA/agezvuJ057262tq90IZEvpbz9z54wPs+AACAPwAAgD/zMuE9ezSmOQj8YjxPsDU2laRuu0VaSjUAAIA/AACAP4uLFD+NCjC+LlR1PlzO1Dw16ZC9fvkLvQAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.1200000000000001, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV/AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEQqoKD0166MAWyUS6WMAXSUR0CURWIJqqOtdX2UKGgGR0BWlfIsAeaKaAdN6ANoCEdAlEahhx5s03V9lChoBkdAPY6vRqoIfWgHS4toCEdAlEiaaG5+Y3V9lChoBkfAcO/N1QqI8GgHTawCaAhHQJRKoLF4s3B1fZQoaAZHQECFqSowVTJoB0uvaAhHQJRKy2v0ROF1fZQoaAZHQEUh7v5P/JhoB0vbaAhHQJRMMkD6nBN1fZQoaAZHwDSN3V09yLhoB0vHaAhHQJRN/KxLTQV1fZQoaAZHP+Z8XN1QqI9oB0txaAhHQJRPY7wKBup1fZQoaAZHwCDGXVsk6cRoB0u9aAhHQJRSL3TNMXd1fZQoaAZHQFIsZ6D5CWxoB03oA2gIR0CUUjB6a9bpdX2UKGgGR8A4iUNayKNyaAdL4mgIR0CUU0FS88LbdX2UKGgGR0A2bhHskY4yaAdLlGgIR0CUU98BMi8ndX2UKGgGR0BP7+4kNWluaAdN6ANoCEdAlFUD1PFefXV9lChoBkc/8pymygPEsWgHTegDaAhHQJRV/K6nR9h1fZQoaAZHQDqaBRQ79ydoB0ugaAhHQJRXx+1Bt1p1fZQoaAZHwCyfwsoUi6hoB0vNaAhHQJRYo+KTB691fZQoaAZHQEd6N3np0OpoB03oA2gIR0CUWVDR+jM3dX2UKGgGR0Aez8FY+0PZaAdLfGgIR0CUWVePJaJRdX2UKGgGR0AgeReTmnwYaAdLiWgIR0CUWfsZYPoWdX2UKGgGR0Afnhisny/caAdLxGgIR0CUWvGwRoRJdX2UKGgGR0BCttX5nDiwaAdLm2gIR0CUXMJAMUh3dX2UKGgGR0BLgshgVoHtaAdLo2gIR0CUXc7dznzQdX2UKGgGR0BA+Y8U21lYaAdLnGgIR0CUXmGKhtcfdX2UKGgGR0BBOKLKmsNlaAdN6ANoCEdAlGBweV9nb3V9lChoBkfAM5FBY3eenWgHS49oCEdAlGCF+/gzg3V9lChoBkfAQjPE0iyIHmgHS7toCEdAlGD9CE6DG3V9lChoBkfANZtRBNVR12gHS5BoCEdAlGFmwV0tAnV9lChoBkdAR6ecriEQG2gHTegDaAhHQJRh7jENvwV1fZQoaAZHQEC+8lHBk7RoB0uFaAhHQJRjemZVn291fZQoaAZHv/10G/vfCQ9oB0t2aAhHQJRjmhsZYPp1fZQoaAZHQAJ2+GoJiRZoB0tvaAhHQJRlczAN5MV1fZQoaAZHQDY6oOx0MgFoB0uwaAhHQJRmwglnh891fZQoaAZHP86i/O+qR2doB0twaAhHQJRpNLsa86F1fZQoaAZHQDwiIj4YaYNoB0uraAhHQJRqL5pJwsJ1fZQoaAZHQDhAUg0TDfpoB0utaAhHQJRsnDNyHVR1fZQoaAZHQDYloTPBzmxoB0ubaAhHQJRuTl+3H7x1fZQoaAZHwBEY0ygwoLJoB0tXaAhHQJRueWPcSGt1fZQoaAZHQFkyTuv2XcBoB03oA2gIR0CUbznmaH9FdX2UKGgGR7/KiGFi8WbgaAdN6ANoCEdAlG9ZpWV/t3V9lChoBkfAIg55AyEcsGgHS5ZoCEdAlHLror4FinV9lChoBkdARan0I1LrX2gHTegDaAhHQJRzovexfOV1fZQoaAZHwBdV3+uNgjRoB0tuaAhHQJR0z0Cih391fZQoaAZHwA+XJo0ygwpoB0uBaAhHQJR2s0fozN51fZQoaAZHQECsad+XqqxoB0ukaAhHQJR3ymP5pJx1fZQoaAZHQDHvY4ACGN9oB0u8aAhHQJR59G5MDfZ1fZQoaAZHQDsGYKIBRyhoB0t6aAhHQJR6L2alUId1fZQoaAZHQCT2h/RVp9JoB0uRaAhHQJR9RY7q6e51fZQoaAZHwEDdHggow25oB0ulaAhHQJR9dEa2nbZ1fZQoaAZHQBLQUxmCiAVoB0u4aAhHQJSBxzbN8md1fZQoaAZHQA40L+glF+doB0unaAhHQJSB8jY7JXB1fZQoaAZHwElcNbTtsvZoB0tvaAhHQJSEaxeLNwB1fZQoaAZHQD6JLlFMIu5oB0uuaAhHQJSEradtl7N1fZQoaAZHQFf07Ik
qto1oB03oA2gIR0CUhNF9roGIdX2UKGgGR0AuO/ATIvJzaAdLlGgIR0CUhm5kK/mDdX2UKGgGR0A0UW/rSmZWaAdLdmgIR0CUiXICU5dXdX2UKGgGR0BAEqGUOd5IaAdLrGgIR0CUjKF6Rhc8dX2UKGgGR8BvbM45tFa0aAdNlwJoCEdAlI7dRzijtXV9lChoBkdANWdpM6BAfWgHS7RoCEdAlJBSQHRkVnV9lChoBkfACjqASWZ7X2gHS6doCEdAlJFVeOXE63V9lChoBkdAWQ+8rZrYXmgHTegDaAhHQJSU49FF2FF1fZQoaAZHQEiqjHGS6lNoB03oA2gIR0CUlPY/Vy3kdX2UKGgGR0BhZDLhaTwEaAdN6ANoCEdAlJW0uHvc8HV9lChoBkdAOVL4rSVnmWgHS8BoCEdAlJYP2GqPwXV9lChoBkdAUHXL9uP3jGgHTegDaAhHQJSWelfqoqF1fZQoaAZHwF1TqzqrzXloB03QAmgIR0CUmIaiKziTdX2UKGgGR0ADcZ3s5XEJaAdLuWgIR0CUmay/sVtXdX2UKGgGR8BCvHUDuBtlaAdLXWgIR0CUmgG96C17dX2UKGgGR0BKDSn1nM+vaAdN6ANoCEdAlJpFUEPlMnV9lChoBkdAV9VCAtnPFGgHTegDaAhHQJSauoegctJ1fZQoaAZHQFKjUFB6a9doB03oA2gIR0CUmwDArQPadX2UKGgGR0Az72wV0tAcaAdLtmgIR0CUnHmVJL/TdX2UKGgGR8BJd5le4TbnaAdL0WgIR0CUnWTKkl/pdX2UKGgGR0AsLeMQ2/BWaAdLtWgIR0CUnXAOJ+DwdX2UKGgGR0ACHx6OYIBzaAdLkWgIR0CUnx73PAwgdX2UKGgGR8A0CLQHAymAaAdLvmgIR0CUoJ6u4gA7dX2UKGgGR8BF0WIGhVU/aAdLyWgIR0CUoMadc0LudX2UKGgGR0AlGDSw4bS7aAdLq2gIR0CUpC0u14PgdX2UKGgGR8BFBwg1WKdhaAdL0WgIR0CUphEH+qBFdX2UKGgGR8A4dqlgtvn9aAdLvmgIR0CUp3hqj8DTdX2UKGgGR0BEd1KoQ4CIaAdLsWgIR0CUqNhIvrWzdX2UKGgGR8AzG3pOerdWaAdLXWgIR0CUqNU1Q66rdX2UKGgGR0A4oYPXkHUuaAdLtGgIR0CUqTBCUorndX2UKGgGR8BThmIKtxMnaAdNPgFoCEdAlKo50fYBeXV9lChoBkfAKbhpxm03O2gHS1xoCEdAlKxNYKYzBXV9lChoBkdARDiioKlYU2gHTegDaAhHQJSu0L5RCQd1fZQoaAZHwBjHf2saKk5oB0uBaAhHQJSvukfs/pt1fZQoaAZHQBIQmAskIHFoB0u4aAhHQJSvwoScslN1fZQoaAZHQBPv+0gKWs1oB0uwaAhHQJS19i3G4qh1fZQoaAZHQDWG3RXwLE1oB0uMaAhHQJS3vW07bL51fZQoaAZHQEF1E87p3X9oB03oA2gIR0CUuDxvegtfdX2UKGgGR0A4REdeY2KmaAdN6ANoCEdAlLh78zhxYXV9lChoBkdAG33MY/FBIGgHS6xoCEdAlLmVK02LpHV9lChoBkdAK+SRB/qgRWgHS8poCEdAlLpHX/YJ3XV9lChoBkdAKsBa1TisGWgHS11oCEdAlL3SsS00FnV9lChoBkfALQiBXjlxO2gHS45oCEdAlL5itq59VnV9lChoBkdAVckvRJEpiWgHTegDaAhHQJS/wRpUPxx1fZQoaAZHQEBXPGACnxdoB0uHaAhHQJTAFR64Uex1fZQoaAZHQEHWUfPomoloB03oA2gIR0CUwuzBhx5tdX2UKGgGR0BZlW+sYEW7aAdN6ANoCEdAlMPF4oqkM3V9lChoBkdAMlvx6OYIB2gHS3VoCEdAlMaHxJ/XoXVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 70, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV1QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCloCmgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 10000, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.001, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 10, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "system_info": {"OS": "Windows-10-10.0.19041-SP0 10.0.19041", "Python": "3.8.0", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.0.1+cpu", "GPU Enabled": "False", "Numpy": "1.24.4", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1"}}
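The updated config records the retraining setup: 16 parallel environments, n_steps=10000, ent_coef=0.001, n_epochs=10, and roughly 1.12M timesteps collected against a 1M target. The run script itself is not part of this commit, so the following is only a minimal sketch of a run matching those hyperparameters (the "MlpPolicy" choice is an assumption consistent with the ActorCriticPolicy recorded above):

```python
# Illustrative only: a PPO run matching the hyperparameters stored in config.json.
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

env = make_vec_env("LunarLander-v2", n_envs=16)

model = PPO(
    "MlpPolicy",
    env,
    n_steps=10000,       # rollout length per env -> 160,000 steps per update
    batch_size=64,
    n_epochs=10,
    gamma=0.999,
    gae_lambda=0.98,
    ent_coef=0.001,
    learning_rate=3e-4,
    verbose=1,
)

# 1,000,000 requested timesteps; with 160,000 steps per rollout the run
# overshoots to 1,120,000, which is the num_timesteps recorded in the config.
model.learn(total_timesteps=1_000_000)
model.save("ppo-LunarLander-v2")
```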
ppo-LunarLander-v2.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f834e34487023a055fabb272c72879f57c378a5211767d28c1316ba1ec2ac445
+size 146080
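The zip is tracked through Git LFS, so the repository itself only stores this pointer (sha256 and size). A minimal sketch, with a placeholder `repo_id`, of fetching the archive, checking it against the pointer's hash, and loading it:

```python
# Sketch with a placeholder repo_id; the actual repository name is not shown here.
import hashlib

from huggingface_hub import hf_hub_download
from stable_baselines3 import PPO

path = hf_hub_download(
    repo_id="<username>/ppo-LunarLander-v2",
    filename="ppo-LunarLander-v2.zip",
)

# The downloaded bytes should hash to the oid recorded in the LFS pointer above.
digest = hashlib.sha256(open(path, "rb").read()).hexdigest()
assert digest == "f834e34487023a055fabb272c72879f57c378a5211767d28c1316ba1ec2ac445"

model = PPO.load(path)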
ppo-LunarLander-v2/data CHANGED
@@ -4,54 +4,54 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function ActorCriticPolicy.__init__ at
- "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
- "reset_noise": "<function ActorCriticPolicy.reset_noise at
- "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
- "_build": "<function ActorCriticPolicy._build at
- "forward": "<function ActorCriticPolicy.forward at
- "extract_features": "<function ActorCriticPolicy.extract_features at
- "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
- "_predict": "<function ActorCriticPolicy._predict at
- "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
- "get_distribution": "<function ActorCriticPolicy.get_distribution at
- "predict_values": "<function ActorCriticPolicy.predict_values at
"__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc_data object at
},
"verbose": 1,
"policy_kwargs": {},
- "num_timesteps": 1000448,
"_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
- "start_time": 1695707650038113200,
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
- ":serialized:": "
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
- ":serialized:": "
},
"_last_original_obs": null,
"_episode_num": 0,
"use_sde": false,
"sde_sample_freq": -1,
- "_current_progress_remaining": -0.00044800000000000395,
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
- ":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
- "_n_updates": 3908,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
@@ -76,15 +76,15 @@
"dtype": "int64",
"_np_random": null
},
- "n_envs": 1,
- "n_steps": 1024,
"gamma": 0.999,
"gae_lambda": 0.98,
- "ent_coef": 0.01,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"batch_size": 64,
- "n_epochs": 4,
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="

@@ -4,54 +4,54 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x0000023F6EB680D0>",
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x0000023F6EB68160>",
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x0000023F6EB681F0>",
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x0000023F6EB68280>",
+ "_build": "<function ActorCriticPolicy._build at 0x0000023F6EB68310>",
+ "forward": "<function ActorCriticPolicy.forward at 0x0000023F6EB683A0>",
+ "extract_features": "<function ActorCriticPolicy.extract_features at 0x0000023F6EB68430>",
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x0000023F6EB684C0>",
+ "_predict": "<function ActorCriticPolicy._predict at 0x0000023F6EB68550>",
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x0000023F6EB685E0>",
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x0000023F6EB68670>",
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x0000023F6EB68700>",
"__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc_data object at 0x0000023F6EB66210>"
},
"verbose": 1,
"policy_kwargs": {},
+ "num_timesteps": 1120000,
"_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+ "start_time": 1695762737737517200,
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAPPGxj7IIpy8y46DPTksSz1FfIO9qg57PQAAAAAAAAAAqja1PimOGDuO4NW78KiovhZhiDxYUe87AAAAAAAAAABAjNO9vaiFP8H0K74JddW+8QYEvvJFhL0AAAAAAAAAAD2v5z6oo8c+Ap0mvKYeq750Gxs9KDzgvQAAAAAAAAAAusWUPqnAMLw9/7M8VnE7uphvk72nrQK7AACAPwAAgD/NKng8c+eyP74bRD+CJJ2+3SuOvN7bL74AAAAAAAAAACZWDj4MsI4/AzPmPbIO5b7RGSs+9QqYOwAAAAAAAAAAMxTQPXskl7oRsIw7EACMPcF+u7ve+yO8AACAPwAAgD/KIvE+KtatvZhD+Dxf7U67yA1XPv3y4TwAAIA/AACAP+105b7qZLa9ZuylPevGqzxhcGS+NJAQPgAAgD8AAIA/2hzePbiW97mGK3o8RUY4vNtiprqOAyK9AACAPwAAgD8zaqm8e7T+OZceID3VjU48qbMivPj0Ab0AAIA/AAAAAFpFxb1c6xe6S1tovGe2Tjusr1A7gw81PAAAgD8AAIA/agezvuJ057262tq90IZEvpbz9z54wPs+AACAPwAAgD/zMuE9ezSmOQj8YjxPsDU2laRuu0VaSjUAAIA/AACAP4uLFD+NCjC+LlR1PlzO1Dw16ZC9fvkLvQAAgD8AAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="
},
"_last_original_obs": null,
"_episode_num": 0,
"use_sde": false,
"sde_sample_freq": -1,
+ "_current_progress_remaining": -0.1200000000000001,
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWV/AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEQqoKD0166MAWyUS6WMAXSUR0CURWIJqqOtdX2UKGgGR0BWlfIsAeaKaAdN6ANoCEdAlEahhx5s03V9lChoBkdAPY6vRqoIfWgHS4toCEdAlEiaaG5+Y3V9lChoBkfAcO/N1QqI8GgHTawCaAhHQJRKoLF4s3B1fZQoaAZHQECFqSowVTJoB0uvaAhHQJRKy2v0ROF1fZQoaAZHQEUh7v5P/JhoB0vbaAhHQJRMMkD6nBN1fZQoaAZHwDSN3V09yLhoB0vHaAhHQJRN/KxLTQV1fZQoaAZHP+Z8XN1QqI9oB0txaAhHQJRPY7wKBup1fZQoaAZHwCDGXVsk6cRoB0u9aAhHQJRSL3TNMXd1fZQoaAZHQFIsZ6D5CWxoB03oA2gIR0CUUjB6a9bpdX2UKGgGR8A4iUNayKNyaAdL4mgIR0CUU0FS88LbdX2UKGgGR0A2bhHskY4yaAdLlGgIR0CUU98BMi8ndX2UKGgGR0BP7+4kNWluaAdN6ANoCEdAlFUD1PFefXV9lChoBkc/8pymygPEsWgHTegDaAhHQJRV/K6nR9h1fZQoaAZHQDqaBRQ79ydoB0ugaAhHQJRXx+1Bt1p1fZQoaAZHwCyfwsoUi6hoB0vNaAhHQJRYo+KTB691fZQoaAZHQEd6N3np0OpoB03oA2gIR0CUWVDR+jM3dX2UKGgGR0Aez8FY+0PZaAdLfGgIR0CUWVePJaJRdX2UKGgGR0AgeReTmnwYaAdLiWgIR0CUWfsZYPoWdX2UKGgGR0Afnhisny/caAdLxGgIR0CUWvGwRoRJdX2UKGgGR0BCttX5nDiwaAdLm2gIR0CUXMJAMUh3dX2UKGgGR0BLgshgVoHtaAdLo2gIR0CUXc7dznzQdX2UKGgGR0BA+Y8U21lYaAdLnGgIR0CUXmGKhtcfdX2UKGgGR0BBOKLKmsNlaAdN6ANoCEdAlGBweV9nb3V9lChoBkfAM5FBY3eenWgHS49oCEdAlGCF+/gzg3V9lChoBkfAQjPE0iyIHmgHS7toCEdAlGD9CE6DG3V9lChoBkfANZtRBNVR12gHS5BoCEdAlGFmwV0tAnV9lChoBkdAR6ecriEQG2gHTegDaAhHQJRh7jENvwV1fZQoaAZHQEC+8lHBk7RoB0uFaAhHQJRjemZVn291fZQoaAZHv/10G/vfCQ9oB0t2aAhHQJRjmhsZYPp1fZQoaAZHQAJ2+GoJiRZoB0tvaAhHQJRlczAN5MV1fZQoaAZHQDY6oOx0MgFoB0uwaAhHQJRmwglnh891fZQoaAZHP86i/O+qR2doB0twaAhHQJRpNLsa86F1fZQoaAZHQDwiIj4YaYNoB0uraAhHQJRqL5pJwsJ1fZQoaAZHQDhAUg0TDfpoB0utaAhHQJRsnDNyHVR1fZQoaAZHQDYloTPBzmxoB0ubaAhHQJRuTl+3H7x1fZQoaAZHwBEY0ygwoLJoB0tXaAhHQJRueWPcSGt1fZQoaAZHQFkyTuv2XcBoB03oA2gIR0CUbznmaH9FdX2UKGgGR7/KiGFi8WbgaAdN6ANoCEdAlG9ZpWV/t3V9lChoBkfAIg55AyEcsGgHS5ZoCEdAlHLror4FinV9lChoBkdARan0I1LrX2gHTegDaAhHQJRzovexfOV1fZQoaAZHwBdV3+uNgjRoB0tuaAhHQJR0z0Cih391fZQoaAZHwA+XJo0ygwpoB0uBaAhHQJR2s0fozN51fZQoaAZHQECsad+XqqxoB0ukaAhHQJR3ymP5pJx1fZQoaAZHQDHvY4ACGN9oB0u8aAhHQJR59G5MDfZ1fZQoaAZHQDsGYKIBRyhoB0t6aAhHQJR6L2alUId1fZQoaAZHQCT2h/RVp9JoB0uRaAhHQJR9RY7q6e51fZQoaAZHwEDdHggow25oB0ulaAhHQJR9dEa2nbZ1fZQoaAZHQBLQUxmCiAVoB0u4aAhHQJSBxzbN8md1fZQoaAZHQA40L+glF+doB0unaAhHQJSB8jY7JXB1fZQoaAZHwElcNbTtsvZoB0tvaAhHQJSEaxeLNwB1fZQoaAZHQD6JLlFMIu5oB0uuaAhHQJSEradtl7N1fZQoaAZHQFf07Ikqto1oB03oA2gIR0CUhNF9roGIdX2UKGgGR0AuO/ATIvJzaAdLlGgIR0CUhm5kK/mDdX2UKGgGR0A0UW/rSmZWaAdLdmgIR0CUiXICU5dXdX2UKGgGR0BAEqGUOd5IaAdLrGgIR0CUjKF6Rhc8dX2UKGgGR8BvbM45tFa0aAdNlwJoCEdAlI7dRzijtXV9lChoBkdANWdpM6BAfWgHS7RoCEdAlJBSQHRkVnV9lChoBkfACjqASWZ7X2gHS6doCEdAlJFVeOXE63V9lChoBkdAWQ+8rZrYXmgHTegDaAhHQJSU49FF2FF1fZQoaAZHQEiqjHGS6lNoB03oA2gIR0CUlPY/Vy3kdX2UKGgGR0BhZDLhaTwEaAdN6ANoCEdAlJW0uHvc8HV9lChoBkdAOVL4rSVnmWgHS8BoCEdAlJYP2GqPwXV9lChoBkdAUHXL9uP3jGgHTegDaAhHQJSWelfqoqF1fZQoaAZHwF1TqzqrzXloB03QAmgIR0CUmIaiKziTdX2UKGgGR0ADcZ3s5XEJaAdLuWgIR0CUmay/sVtXdX2UKGgGR8BCvHUDuBtlaAdLXWgIR0CUmgG96C17dX2UKGgGR0BKDSn1nM+vaAdN6ANoCEdAlJpFUEPlMnV9lChoBkdAV9VCAtnPFGgHTegDaAhHQJSauoegctJ1fZQoaAZHQFKjUFB6a9doB03oA2gIR0CUmwDArQPadX2UKGgGR0Az72wV0tAcaAdLtmgIR0CUnHmVJL/TdX2UKGgGR8BJd5le4TbnaAdL0WgIR0CUnWTKkl/pdX2UKGgGR0AsLeMQ2/BWaAdLtWgIR0CUnXAOJ+DwdX2UKGgGR0ACHx6OYIBzaAdLkWgIR0CUnx73PAwgdX2UKGgGR8A0CLQHAymAaAdLvmgIR0CUoJ6u4gA7dX2UKGgGR8BF0WIGhVU/aAdLyWgIR0CUoMadc0LudX2UKGgGR0AlGDSw4bS7aAdLq2gIR0CUpC0u14PgdX2UKGgGR8BFBwg1WKdhaAdL0WgIR0CUphEH+qBFdX2UKGgGR8A4dqlgtvn9aAdLvmgIR0CUp3hqj8DTdX2UKGgGR0BEd1KoQ4CIaAdLsWgIR0CUqNhIvrWzdX2UKGgGR8AzG3pOerdWaAdLXWgIR0CUqNU1Q66rdX2UKGgGR0A4oYPXkHUuaAdLtGgIR0CUqTBCUorndX2UKGgGR8BThmIKtxMnaAdNPgFoCEdAlKo50fYBeXV9lChoBkfAKbhpxm03O2gHS1xoCEdAlKxNYKYzBXV9lChoBkdARDiioKlYU2gHTegDaAhHQJSu0L5RCQd1fZQoaAZHwBjHf2saKk5oB0uBaAhHQJSvukfs/pt1fZ
QoaAZHQBIQmAskIHFoB0u4aAhHQJSvwoScslN1fZQoaAZHQBPv+0gKWs1oB0uwaAhHQJS19i3G4qh1fZQoaAZHQDWG3RXwLE1oB0uMaAhHQJS3vW07bL51fZQoaAZHQEF1E87p3X9oB03oA2gIR0CUuDxvegtfdX2UKGgGR0A4REdeY2KmaAdN6ANoCEdAlLh78zhxYXV9lChoBkdAG33MY/FBIGgHS6xoCEdAlLmVK02LpHV9lChoBkdAK+SRB/qgRWgHS8poCEdAlLpHX/YJ3XV9lChoBkdAKsBa1TisGWgHS11oCEdAlL3SsS00FnV9lChoBkfALQiBXjlxO2gHS45oCEdAlL5itq59VnV9lChoBkdAVckvRJEpiWgHTegDaAhHQJS/wRpUPxx1fZQoaAZHQEBXPGACnxdoB0uHaAhHQJTAFR64Uex1fZQoaAZHQEHWUfPomoloB03oA2gIR0CUwuzBhx5tdX2UKGgGR0BZlW+sYEW7aAdN6ANoCEdAlMPF4oqkM3V9lChoBkdAMlvx6OYIB2gHS3VoCEdAlMaHxJ/XoXVlLg=="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+ "_n_updates": 70,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",

@@ -76,15 +76,15 @@
"dtype": "int64",
"_np_random": null
},
+ "n_envs": 16,
+ "n_steps": 10000,
"gamma": 0.999,
"gae_lambda": 0.98,
+ "ent_coef": 0.001,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"batch_size": 64,
+ "n_epochs": 10,
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVrgIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMfGM6XFVzZXJzXGFtYnJlZW4uaGFuaWZcR2l0aHViXGRlZXByZWluZm9yY2VtZW50bGVhcm5pbmdfaHVnZ2luZ2ZhY2VcdmVudlxsaWJcc2l0ZS1wYWNrYWdlc1xzdGFibGVfYmFzZWxpbmVzM1xjb21tb25cdXRpbHMucHmUjARmdW5jlEuEQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UaAx1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgefZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
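The `data` file above is the JSON blob that stable-baselines3 stores inside the model zip when the model is saved; it carries the same fields as config.json. A small sketch, assuming the zip has already been downloaded locally, of inspecting those fields without rebuilding the full model:

```python
# Sketch: peek at the saved hyperparameters without instantiating the model.
# Assumes ppo-LunarLander-v2.zip is in the current directory.
from stable_baselines3.common.save_util import load_from_zip_file

data, params, pytorch_variables = load_from_zip_file("ppo-LunarLander-v2.zip")

for key in ("n_envs", "n_steps", "n_epochs", "ent_coef", "num_timesteps"):
    print(key, data.get(key))
```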
ppo-LunarLander-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:94bdcd8a8841bb04421f90e3fda0e09ad3088ed8e86f559d58f545e03a503ea2
 size 87545
ppo-LunarLander-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7f4dd59bec96b43f13c63ffa969ca1aa910a3a83153d7b6a92beb9b4ed7886c2
 size 43201
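policy.pth and policy.optimizer.pth are the PyTorch state dicts stable-baselines3 stores inside the zip (also tracked here as standalone LFS files). A hedged sketch of inspecting the updated policy weights, assuming policy.pth has been extracted or downloaded next to the script:

```python
# Sketch: list the parameter tensors stored in the updated policy checkpoint.
import torch

state_dict = torch.load("policy.pth", map_location="cpu")
for name, tensor in state_dict.items():
    print(f"{name}: {tuple(tensor.shape)}")
```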
results.json CHANGED
@@ -1 +1 @@
-{"mean_reward":
+{"mean_reward": 167.10165951555012, "std_reward": 122.86334723849355, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-09-27T07:36:44.102442"}
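results.json records a deterministic 10-episode evaluation (mean 167.10, std 122.86). A minimal sketch of reproducing that kind of evaluation locally; the exact evaluation environment wrapping used for the upload is an assumption:

```python
# Sketch: re-run the kind of evaluation summarized in results.json.
import gymnasium as gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

model = PPO.load("ppo-LunarLander-v2")
eval_env = Monitor(gym.make("LunarLander-v2"))

mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```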