IlluminatiPudding committed on
Commit
3423c11
1 Parent(s): 2bcc858

Initial commit

README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - PandaPickAndPlace-v3
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: A2C
+   results:
+   - task:
+       type: reinforcement-learning
+       name: reinforcement-learning
+     dataset:
+       name: PandaPickAndPlace-v3
+       type: PandaPickAndPlace-v3
+     metrics:
+     - type: mean_reward
+       value: -45.00 +/- 15.00
+       name: mean_reward
+       verified: false
+ ---
+
+ # **A2C** Agent playing **PandaPickAndPlace-v3**
+ This is a trained model of an **A2C** agent playing **PandaPickAndPlace-v3**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-baselines3)
+ TODO: Add your code
+
+ ```python
+ from stable_baselines3 import ...
+ from huggingface_sb3 import load_from_hub
+
+ ...
+ ```
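A minimal usage sketch for the TODO above, assuming the checkpoint lives in this repository (the repo id `IlluminatiPudding/a2c-PandaPickAndPlace-v3` is inferred from the commit author and file names) and that `panda-gym` is installed to register the environment:

```python
import gymnasium as gym
import panda_gym  # assumed dependency; registers PandaPickAndPlace-v3
from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C

# Download the checkpoint added in this commit and load it.
checkpoint = load_from_hub(
    repo_id="IlluminatiPudding/a2c-PandaPickAndPlace-v3",  # assumed repo id
    filename="a2c-PandaPickAndPlace-v3.zip",
)
model = A2C.load(checkpoint)

# Roll out one episode with the trained policy.
env = gym.make("PandaPickAndPlace-v3")
obs, _ = env.reset()
done = False
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, terminated, truncated, _ = env.step(action)
    done = terminated or truncated
env.close()
```

Note that this commit also adds `vec_normalize.pkl`; to match the reported score, the environment should additionally be wrapped with those saved `VecNormalize` statistics (see the evaluation sketch after `results.json` below).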
a2c-PandaPickAndPlace-v3.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4320a15549a716ec2a2f15711e0039da390a16702e379f4c46e8c52ef8ef2743
+ size 124172
a2c-PandaPickAndPlace-v3/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 2.1.0
a2c-PandaPickAndPlace-v3/data ADDED
@@ -0,0 +1,97 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
+ "__module__": "stable_baselines3.common.policies",
+ "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7c04346f31c0>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7c04346e78c0>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {
+ ":type:": "<class 'dict'>",
+ ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=",
+ "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
+ "optimizer_kwargs": {
+ "alpha": 0.99,
+ "eps": 1e-05,
+ "weight_decay": 0
+ }
+ },
+ "num_timesteps": 1000192,
+ "_total_timesteps": 1000000,
+ "_num_timesteps_at_start": 0,
+ "seed": null,
+ "action_noise": null,
+ "start_time": 1699960566683586096,
+ "learning_rate": 0.001,
+ "tensorboard_log": null,
+ "_last_obs": {
+ ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAgiG3PrBbsj5avQk+XpMdPwfnOj8GvAk+OdtYPx0zKr9avQk+SplKv2/s0j6FuQk+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA1ILAv61RdD+DQuM/wtS/PgaXSr+j19U+uFF6PyLOvD4p6wg/mpYjP7MHrj4hYI4/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAACmTzY/1al4vrX/Fb9nZeM+wJjHPxuonj2hw7k/giG3PrBbsj5avQk+v8muvHz3Crz13oe8IfAcPeChuLxeYnA9pFYNPCEOg7zWE3I7XlwWP8ELEz9WkZ6+NtwuP/cZED/1W7g/K65Iv16THT8H5zo/BrwJPgN+rrwhuAi8dr6UvHT3Gz0Qhbu8XmJwPZJWDTwpDoO8rfJVO+5qBz91AS8/3a0Qv6+lSz5ZcTU+oX2NPd2wSL8521g/HTMqv1q9CT4l8q28M6gNvAa3o7zdwho9pUm8vF5icD2jVg08IA6DvEEQQjs4AQE90ocgv9UvF78+8ES/qnwDwHxrSj0lIAO/SplKv2/s0j6FuQk+a1GsvCZhC7wpe5O8cewaPZXKurxeYnA9k1YNPCkOg7wz9VQ7lGgOSwRLE4aUaBJ0lFKUdS4=",
+ "achieved_goal": "[[ 0.35767752 0.34835577 0.13451138]\n [ 0.6155299 0.7300877 0.13450631]\n [ 0.8470951 -0.6648424 0.13451138]\n [-0.7914015 0.4119601 0.13449676]]",
+ "desired_goal": "[[-1.5039926 0.9543713 1.7754673 ]\n [ 0.3746701 -0.79136693 0.4176608 ]\n [ 0.9778094 0.36876017 0.53483826]\n [ 0.63901675 0.3399025 1.1123086 ]]",
+ "observation": "[[ 0.71215284 -0.24283536 -0.585933 0.444133 1.5593491 0.07746907\n 1.4512826 0.35767752 0.34835577 0.13451138 -0.02133643 -0.00848186\n -0.01658581 0.03831494 -0.02253813 0.05868756 0.00862661 -0.01599795\n 0.00369381]\n [ 0.5873469 0.5743981 -0.30970258 0.68304765 0.5628962 1.4403063\n -0.7839076 0.6155299 0.7300877 0.13450631 -0.02130032 -0.00834468\n -0.01815723 0.03807779 -0.0228906 0.05868756 0.0086266 -0.01599796\n 0.00326459]\n [ 0.52897537 0.683616 -0.56515294 0.19887422 0.1771902 0.06908727\n -0.7839487 0.8470951 -0.6648424 0.13451138 -0.02123363 -0.00864606\n -0.01998473 0.03778349 -0.02298434 0.05868756 0.00862661 -0.01599795\n 0.00296117]\n [ 0.0314953 -0.62707245 -0.5905736 -0.7692908 -2.054484 0.04941891\n -0.51220924 -0.7914015 0.4119601 0.13449676 -0.02103492 -0.00850705\n -0.01800306 0.03782314 -0.02280168 0.05868756 0.0086266 -0.01599796\n 0.00324948]]"
+ },
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": {
+ ":type:": "<class 'collections.OrderedDict'>",
+ ":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAABxcQvgql1T0K16M8Kg40PTfLtL0K16M8j2z0OorK7zsK16M8xzOjPdu3wLwK16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAdOCdvYBghT0K16M8uRVLvSKN1Lz7Kxg9hi0PPoxQh70K16M8fK8SPtdY7jwK16M8lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAABxcQvgql1T0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAACoOND03y7S9CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAACPbPQ6isrvOwrXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAxzOjPdu3wLwK16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=",
+ "achieved_goal": "[[-0.14071284 0.10431869 0.02 ]\n [ 0.04395882 -0.08827823 0.02 ]\n [ 0.00186481 0.00731785 0.02 ]\n [ 0.0796886 -0.02352517 0.02 ]]",
+ "desired_goal": "[[-0.07708827 0.06512547 0.02 ]\n [-0.04958126 -0.0259462 0.03715132]\n [ 0.1398221 -0.0660716 0.02 ]\n [ 0.14324754 0.0290951 0.02 ]]",
+ "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -1.4071284e-01\n 1.0431869e-01 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 4.3958820e-02\n -8.8278227e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 1.8648076e-03\n 7.3178457e-03 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 7.9688601e-02\n -2.3525169e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]]"
+ },
+ "_episode_num": 0,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": -0.00019199999999996997,
+ "_stats_window_size": 100,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEkAAAAAAACMAWyUSzKMAXSUR0CqagBR64UfdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cqaga+FlCkdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqanImXw9adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqag4k/r0KdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqaqcJtzjndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqawz2WY4RdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqa346wMYudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqay3BpHqedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqa8OIyj59dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbCaIeo1ldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbJT/hl19dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbDGcOLBLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbM5+YtxudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbTQwTM7mdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbaJ/5LyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbUrrX18LdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbeSxzJZGdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqblLyc0+DdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbsIhIOH4dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbmWom5UcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbwRPwd8zdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb2myPdVOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb9g2AG0NdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb4GQSzw+dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcBcZLqUvdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqcB0oBq9HdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcH4iosI3dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcPNcfNiZdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcKI6CDmKdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcUOkDZDidX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcaEzwc5sdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqchP+XJHRdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcbLpqynldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqclBNmDlHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcqwiiZfEdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqcxhm5DqodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcsEWIoE0dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc2QfQrtmdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc8FAmiQDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdC7sF+uvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc8jk+5e7dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdGpXIU8FdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdMaSTyJ9dX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqdMzo+wC9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdTSdWhh6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdNaJZW7wdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdW6rWAf/dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqddGyon8bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdj4NqgyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdd4iHIp6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdnobOu7pdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdt07bL2YdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd0Q7tAs1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdurs0HhTdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd4KTB68hdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd+biZOSGdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeFO+qR2bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd/xjz7MxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeJxwqAjIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeQKZtvXLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeW61LJ0XdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqeXQ2uPmxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeRjAJswddX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqecJ97WupdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeiU4aP0adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqepx0EHMVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqejQHqu8sdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqetG+9Jz1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqezsMZxaQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe64Sg5BDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe0r876pHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe+bYK6WgdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfEX5N47jdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfMeajN6gdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfJ2OAAhjdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfW9AX2ugdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqfXilBQendX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfemhM8HOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfoJbD/EPdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfnDv3JxOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqf18h1TzedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqf8ghKUV0dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgE00vXbudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgC3Zwn6VdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgRHFo+OfdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgYNpM6BAdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cq
ggXdCVrzdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgckMspXqdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqgq0QbuMNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqgxo0ZWJadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqg50kGA09dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqg3vLowEhdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhGvMjeKsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhNjHn2ZidX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhWTY287IdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhS1LzwtrdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhdbJ4jbBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhjReLNwBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhqFQ2uPndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhkBmwqy4dWUu"
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 3907,
+ "n_steps": 64,
+ "gamma": 0.95,
+ "gae_lambda": 0.96,
+ "ent_coef": 0.001,
+ "vf_coef": 0.1,
+ "max_grad_norm": 0.5,
+ "normalize_advantage": false,
+ "observation_space": {
+ ":type:": "<class 'gymnasium.spaces.dict.Dict'>",
+ ":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=",
+ "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])",
+ "_shape": null,
+ "dtype": null,
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gymnasium.spaces.box.Box'>",
+ ":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=",
+ "dtype": "float32",
+ "bounded_below": "[ True True True True]",
+ "bounded_above": "[ True True True True]",
+ "_shape": [
+ 4
+ ],
+ "low": "[-1. -1. -1. -1.]",
+ "high": "[1. 1. 1. 1.]",
+ "low_repr": "-1.0",
+ "high_repr": "1.0",
+ "_np_random": null
+ },
+ "n_envs": 4,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9QYk3S8an8hZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
+ }
+ }
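The `data` file above records the training hyperparameters (RMSprop optimizer with its A2C defaults, `n_steps=64`, `gamma=0.95`, `gae_lambda=0.96`, `ent_coef=0.001`, `vf_coef=0.1`, `max_grad_norm=0.5`, `learning_rate=0.001`, `n_envs=4`, one million timesteps). A sketch of an equivalent training setup, assuming `panda-gym` provides the environment and that the `VecNormalize` wrapper corresponds to the `vec_normalize.pkl` file in this commit:

```python
import panda_gym  # assumed dependency; registers PandaPickAndPlace-v3
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

# 4 parallel environments, as recorded in "n_envs": 4, with observation/reward normalization.
env = VecNormalize(make_vec_env("PandaPickAndPlace-v3", n_envs=4))

model = A2C(
    "MultiInputPolicy",  # MultiInputActorCriticPolicy, matching the Dict observation space
    env,
    learning_rate=0.001,
    n_steps=64,
    gamma=0.95,
    gae_lambda=0.96,
    ent_coef=0.001,
    vf_coef=0.1,
    max_grad_norm=0.5,
    normalize_advantage=False,
    verbose=1,
)
# The recorded RMSprop settings (alpha=0.99, eps=1e-05, weight_decay=0) are SB3's A2C defaults,
# so no explicit policy_kwargs are needed here.
model.learn(total_timesteps=1_000_000)
model.save("a2c-PandaPickAndPlace-v3")
env.save("vec_normalize.pkl")
```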
a2c-PandaPickAndPlace-v3/policy.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecb8685c933dd45fa525db7b1091176ffc3a3c3fdade730762e43e89ce2034d6
+ size 52079
a2c-PandaPickAndPlace-v3/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0208c9d983d744fb96e8032959c6f8ed8cc7d974ca25b3ec1cb8e24f1228c5cc
+ size 53359
a2c-PandaPickAndPlace-v3/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c35cea3b2e60fb5e7e162d3592df775cd400e575a31c72f359fb9e654ab00c5
+ size 864
a2c-PandaPickAndPlace-v3/system_info.txt ADDED
@@ -0,0 +1,9 @@
+ - OS: Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023
+ - Python: 3.10.12
+ - Stable-Baselines3: 2.1.0
+ - PyTorch: 2.1.0+cu118
+ - GPU Enabled: True
+ - Numpy: 1.23.5
+ - Cloudpickle: 2.2.1
+ - Gymnasium: 0.29.1
+ - OpenAI Gym: 0.25.2
config.json ADDED
@@ -0,0 +1 @@
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7c04346f31c0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7c04346e78c0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000192, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1699960566683586096, "learning_rate": 0.001, "tensorboard_log": null, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAgiG3PrBbsj5avQk+XpMdPwfnOj8GvAk+OdtYPx0zKr9avQk+SplKv2/s0j6FuQk+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA1ILAv61RdD+DQuM/wtS/PgaXSr+j19U+uFF6PyLOvD4p6wg/mpYjP7MHrj4hYI4/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAACmTzY/1al4vrX/Fb9nZeM+wJjHPxuonj2hw7k/giG3PrBbsj5avQk+v8muvHz3Crz13oe8IfAcPeChuLxeYnA9pFYNPCEOg7zWE3I7XlwWP8ELEz9WkZ6+NtwuP/cZED/1W7g/K65Iv16THT8H5zo/BrwJPgN+rrwhuAi8dr6UvHT3Gz0Qhbu8XmJwPZJWDTwpDoO8rfJVO+5qBz91AS8/3a0Qv6+lSz5ZcTU+oX2NPd2wSL8521g/HTMqv1q9CT4l8q28M6gNvAa3o7zdwho9pUm8vF5icD2jVg08IA6DvEEQQjs4AQE90ocgv9UvF78+8ES/qnwDwHxrSj0lIAO/SplKv2/s0j6FuQk+a1GsvCZhC7wpe5O8cewaPZXKurxeYnA9k1YNPCkOg7wz9VQ7lGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[ 0.35767752 0.34835577 0.13451138]\n [ 0.6155299 0.7300877 0.13450631]\n [ 0.8470951 -0.6648424 0.13451138]\n [-0.7914015 0.4119601 0.13449676]]", "desired_goal": "[[-1.5039926 0.9543713 1.7754673 ]\n [ 0.3746701 -0.79136693 0.4176608 ]\n [ 0.9778094 0.36876017 0.53483826]\n [ 0.63901675 0.3399025 1.1123086 ]]", "observation": "[[ 0.71215284 -0.24283536 -0.585933 0.444133 1.5593491 0.07746907\n 1.4512826 0.35767752 0.34835577 0.13451138 -0.02133643 -0.00848186\n -0.01658581 0.03831494 -0.02253813 0.05868756 0.00862661 -0.01599795\n 0.00369381]\n [ 0.5873469 0.5743981 -0.30970258 0.68304765 0.5628962 1.4403063\n -0.7839076 0.6155299 0.7300877 0.13450631 -0.02130032 -0.00834468\n -0.01815723 0.03807779 -0.0228906 0.05868756 0.0086266 -0.01599796\n 0.00326459]\n [ 0.52897537 0.683616 -0.56515294 0.19887422 0.1771902 0.06908727\n -0.7839487 0.8470951 -0.6648424 0.13451138 -0.02123363 -0.00864606\n -0.01998473 0.03778349 -0.02298434 0.05868756 0.00862661 -0.01599795\n 0.00296117]\n [ 0.0314953 -0.62707245 -0.5905736 -0.7692908 -2.054484 0.04941891\n -0.51220924 -0.7914015 0.4119601 0.13449676 -0.02103492 -0.00850705\n -0.01800306 0.03782314 -0.02280168 0.05868756 0.0086266 -0.01599796\n 0.00324948]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAABxcQvgql1T0K16M8Kg40PTfLtL0K16M8j2z0OorK7zsK16M8xzOjPdu3wLwK16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAdOCdvYBghT0K16M8uRVLvSKN1Lz7Kxg9hi0PPoxQh70K16M8fK8SPtdY7jwK16M8lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAABxcQvgql1T0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAACoOND03y7S9CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAACPbPQ6isrvOwrXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAxzOjPdu3wLwK16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[-0.14071284 0.10431869 0.02 ]\n [ 0.04395882 -0.08827823 0.02 ]\n [ 0.00186481 0.00731785 0.02 ]\n [ 0.0796886 -0.02352517 0.02 ]]", "desired_goal": "[[-0.07708827 0.06512547 
0.02 ]\n [-0.04958126 -0.0259462 0.03715132]\n [ 0.1398221 -0.0660716 0.02 ]\n [ 0.14324754 0.0290951 0.02 ]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 -1.4071284e-01\n 1.0431869e-01 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 4.3958820e-02\n -8.8278227e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 1.8648076e-03\n 7.3178457e-03 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00 0.0000000e+00 7.9688601e-02\n -2.3525169e-02 2.0000000e-02 0.0000000e+00 -0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00\n 0.0000000e+00 0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.00019199999999996997, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEkAAAAAAACMAWyUSzKMAXSUR0CqagBR64UfdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cqaga+FlCkdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqanImXw9adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqag4k/r0KdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqaqcJtzjndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqawz2WY4RdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqa346wMYudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqay3BpHqedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqa8OIyj59dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbCaIeo1ldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbJT/hl19dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbDGcOLBLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbM5+YtxudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbTQwTM7mdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbaJ/5LyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbUrrX18LdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbeSxzJZGdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqblLyc0+DdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbsIhIOH4dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbmWom5UcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqbwRPwd8zdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb2myPdVOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb9g2AG0NdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqb4GQSzw+dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcBcZLqUvdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqcB0oBq9HdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcH4iosI3dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcPNcfNiZdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcKI6CDmKdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcUOkDZDidX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcaEzwc5sdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqchP+XJHRdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcbLpqynldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqclBNmDlHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcqwiiZfEdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqcxhm5DqodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqcsEWIoE0dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc2QfQrtmdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc8FAmiQDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdC7sF+uvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqc8jk+5e7dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdGpXIU8FdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdMaSTyJ9dX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqdMzo+wC9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdTSdWhh6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdNaJZW7wdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqdW6rWAf/dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqddGyon8bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdj4NqgyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdd4iHIp6dX2UKGgGR
8BJAAAAAAAAaAdLMmgIR0CqdnobOu7pdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdt07bL2YdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd0Q7tAs1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqdurs0HhTdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd4KTB68hdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd+biZOSGdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeFO+qR2bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqd/xjz7MxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeJxwqAjIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeQKZtvXLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeW61LJ0XdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqeXQ2uPmxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeRjAJswddX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqecJ97WupdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqeiU4aP0adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqepx0EHMVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqejQHqu8sdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqetG+9Jz1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqezsMZxaQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe64Sg5BDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe0r876pHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqe+bYK6WgdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfEX5N47jdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfMeajN6gdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfJ2OAAhjdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfW9AX2ugdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CqfXilBQendX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfemhM8HOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfoJbD/EPdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqfnDv3JxOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqf18h1TzedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqf8ghKUV0dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgE00vXbudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgC3Zwn6VdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgRHFo+OfdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgYNpM6BAdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqggXdCVrzdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqgckMspXqdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqgq0QbuMNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqgxo0ZWJadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqg50kGA09dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cqg3vLowEhdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhGvMjeKsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhNjHn2ZidX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhWTY287IdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhS1LzwtrdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhdbJ4jbBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhjReLNwBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhqFQ2uPndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CqhkBmwqy4dWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 3907, "n_steps": 64, "gamma": 0.95, "gae_lambda": 0.96, "ent_coef": 0.001, "vf_coef": 0.1, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": 
"gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True]", "bounded_above": "[ True True True True]", "_shape": [4], "low": "[-1. -1. -1. -1.]", "high": "[1. 1. 1. 
1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": null}, "n_envs": 4, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9QYk3S8an8hZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.1.0", "PyTorch": "2.1.0+cu118", "GPU Enabled": "True", "Numpy": "1.23.5", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.1", "OpenAI Gym": "0.25.2"}}
replay.mp4 ADDED
Binary file (806 kB).
 
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": -45.0, "std_reward": 15.0, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-11-14T12:16:26.466754"}
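`results.json` reports the evaluation summary (10 deterministic episodes, mean reward -45.0 +/- 15.0). A sketch of how such a result is typically reproduced with SB3's `evaluate_policy`, assuming the repo id inferred above and the normalization statistics from `vec_normalize.pkl`:

```python
import gymnasium as gym
import panda_gym  # assumed dependency; registers PandaPickAndPlace-v3
from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

repo_id = "IlluminatiPudding/a2c-PandaPickAndPlace-v3"  # assumed repo id
model = A2C.load(load_from_hub(repo_id, "a2c-PandaPickAndPlace-v3.zip"))

# Wrap the eval env with the normalization statistics saved alongside the model.
env = DummyVecEnv([lambda: gym.make("PandaPickAndPlace-v3")])
env = VecNormalize.load(load_from_hub(repo_id, "vec_normalize.pkl"), env)
env.training = False      # freeze the running statistics during evaluation
env.norm_reward = False   # report raw environment rewards

mean_reward, std_reward = evaluate_policy(model, env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```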
vec_normalize.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:223a178c23224878f9567409064cc86c6dfe925ef71bad294f56d18e48b40ee7
+ size 3013