dim-tsoukalas committed
Commit b2d7b3a
1 Parent(s): 11a59cb
Initial commit

Browse files:
- README.md +37 -0
- a2c-PandaPickAndPlace-v3.zip +3 -0
- a2c-PandaPickAndPlace-v3/_stable_baselines3_version +1 -0
- a2c-PandaPickAndPlace-v3/data +97 -0
- a2c-PandaPickAndPlace-v3/policy.optimizer.pth +3 -0
- a2c-PandaPickAndPlace-v3/policy.pth +3 -0
- a2c-PandaPickAndPlace-v3/pytorch_variables.pth +3 -0
- a2c-PandaPickAndPlace-v3/system_info.txt +9 -0
- config.json +1 -0
- replay.mp4 +0 -0
- results.json +1 -0
- vec_normalize.pkl +3 -0
README.md
ADDED
@@ -0,0 +1,37 @@
---
library_name: stable-baselines3
tags:
- PandaPickAndPlace-v3
- deep-reinforcement-learning
- reinforcement-learning
- stable-baselines3
model-index:
- name: A2C
  results:
  - task:
      type: reinforcement-learning
      name: reinforcement-learning
    dataset:
      name: PandaPickAndPlace-v3
      type: PandaPickAndPlace-v3
    metrics:
    - type: mean_reward
      value: -50.00 +/- 0.00
      name: mean_reward
      verified: false
---

# **A2C** Agent playing **PandaPickAndPlace-v3**
This is a trained model of an **A2C** agent playing **PandaPickAndPlace-v3**
using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).

## Usage (with Stable-baselines3)
TODO: Add your code


```python
from stable_baselines3 import ...
from huggingface_sb3 import load_from_hub

...
```
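The usage section in this README is still the template stub. As a minimal sketch of how the committed checkpoint could be loaded and run (the repo id `dim-tsoukalas/a2c-PandaPickAndPlace-v3` and the `panda_gym` dependency are assumptions not stated in this commit; applying `vec_normalize.pkl` is implied by that file being part of the upload):

```python
import gymnasium as gym
import panda_gym  # assumed dependency; registers PandaPickAndPlace-v3 with gymnasium
from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

# Assumed repo id, inferred from the committer name and the file names in this commit.
repo_id = "dim-tsoukalas/a2c-PandaPickAndPlace-v3"
model_path = load_from_hub(repo_id, "a2c-PandaPickAndPlace-v3.zip")
stats_path = load_from_hub(repo_id, "vec_normalize.pkl")

# Rebuild the environment and restore the observation-normalization statistics.
env = DummyVecEnv([lambda: gym.make("PandaPickAndPlace-v3")])
env = VecNormalize.load(stats_path, env)
env.training = False      # do not update the running statistics at test time
env.norm_reward = False   # report the raw task reward

model = A2C.load(model_path, env=env)

obs = env.reset()
for _ in range(1000):
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
```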
a2c-PandaPickAndPlace-v3.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5b82a35929465fe0416bb975c13a227317366a08969552be905790603a696e
size 124543
a2c-PandaPickAndPlace-v3/_stable_baselines3_version
ADDED
@@ -0,0 +1 @@
2.1.0
a2c-PandaPickAndPlace-v3/data
ADDED
@@ -0,0 +1,97 @@
{
"policy_class": {
":type:": "<class 'abc.ABCMeta'>",
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
"__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x797e5fed43a0>",
"__abstractmethods__": "frozenset()",
"_abc_impl": "<_abc._abc_data object at 0x797e5fecd580>"
},
"verbose": 1,
"policy_kwargs": {
":type:": "<class 'dict'>",
":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=",
"optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
"optimizer_kwargs": {
"alpha": 0.99,
"eps": 1e-05,
"weight_decay": 0
}
},
"num_timesteps": 1000000,
"_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
"start_time": 1697999708116909131,
"learning_rate": 0.0007,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAZ/C8PxRLh7/lsvY9fQpmP4D6Rj7lsvY9pA5cvKwTYT9ds/Y9mGGbv6w9fL9fqfY9lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAABNxhPxf/n7/9iwQ/xqKrvhS3iD8/Xoq/XQ7cP6yurr6bYmo/YxRWP61yoL+lAK4/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAAAi+OS8Pxy9P9FxX7+w03o99gMLuiMBRz1/6O2+Z/C8PxRLh7/lsvY9zSqUOiB2gbxRDei8x95iPZmvKr2so2I9exBpPAnWe7rfWaM6u+nKPv8CqL+ku2s/zT2jPYT4gD0Ptn8+FPntvn0KZj+A+kY+5bL2PcUqlDohdoG8KRHrvNveYj2Oryq9raNiPdIPaTzwwHu6UVajOlBnhb7Y3ss/jIJfv83Wuz12Be68JLY5PRHn7b6kDly8rBNhP12z9j2Y+oc6B+uCvJU387xoxWI9FKMrvRZAZD3d4XM8U3vgupaZdToKKi8+EjV+PrlSZr/P5Go+KfaqPiHTC79L8OS+mGGbv6w9fL9fqfY97kaqOmU+gbycEe+8ODZjPaWALL0Gb2M9tHGDPN46tTlQbos6lGgOSwRLE4aUaBJ0lFKUdS4=",
"achieved_goal": "[[ 1.4760865 -1.0569787 0.1204584 ]\n [ 0.89859754 0.19431496 0.1204584 ]\n [-0.01343122 0.8792064 0.1204593 ]\n [-1.2139158 -0.98531604 0.12044024]]",
"desired_goal": "[[ 0.8822634 -1.2499722 0.51776105]\n [-0.33522624 1.0680871 -1.0810012 ]\n [ 1.7191883 -0.3411764 0.9155671 ]\n [ 0.8362486 -1.2534996 1.3593947 ]]",
"observation": "[[-2.79503502e-02 1.47742450e+00 -8.72830451e-01 6.12370372e-02\n -5.30301942e-04 4.85850684e-02 -4.64664429e-01 1.47608650e+00\n -1.05697870e+00 1.20458402e-01 1.13042595e-03 -1.58033967e-02\n -2.83266623e-02 5.53882383e-02 -4.16713692e-02 5.53318709e-02\n 1.42251207e-02 -9.60678386e-04 1.24626968e-03]\n [ 3.96314472e-01 -1.31259143e+00 9.20831919e-01 7.97077194e-02\n 6.29740059e-02 2.49717936e-01 -4.64790940e-01 8.98597538e-01\n 1.94314957e-01 1.20458402e-01 1.13042502e-03 -1.58033986e-02\n -2.86947060e-02 5.53883128e-02 -4.16713282e-02 5.53318746e-02\n 1.42249633e-02 -9.60364006e-04 1.24616374e-03]\n [-2.60553837e-01 1.59273815e+00 -8.73085737e-01 9.17182937e-02\n -2.90553384e-02 4.53397185e-02 -4.64653522e-01 -1.34312250e-02\n 8.79206419e-01 1.20459296e-01 1.03743654e-03 -1.59812104e-02\n -2.96895895e-02 5.53640425e-02 -4.19035703e-02 5.57251796e-02\n 1.48853930e-02 -1.71265972e-03 9.36889439e-04]\n [ 1.71058804e-01 2.48249322e-01 -8.99699748e-01 2.29388461e-01\n 3.33909303e-01 -5.46190321e-01 -4.47145790e-01 -1.21391582e+00\n -9.85316038e-01 1.20440237e-01 1.29911094e-03 -1.57768223e-02\n -2.91832015e-02 5.54716289e-02 -4.21148725e-02 5.55258021e-02\n 1.60454288e-02 3.45668697e-04 1.06377341e-03]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAFdcNvhGg2r0K16M8JkYGvqKYlb0K16M8N/mnvVwRJDwK16M8uLGYPQcfGL4K16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAff8Xvi3Ipz1NK9M9a9jPPMaJDT0K16M87Fukvbkxxb2bOgU+KHRXPL2PQz2lNPw9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAFdcNvhGg2r0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAACZGBr6imJW9CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAAA3+ae9XBEkPArXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAuLGYPQcfGL4K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=",
"achieved_goal": "[[-0.13851579 -0.10675061 0.02 ]\n [-0.13112697 -0.07304503 0.02 ]\n [-0.08201831 0.0100139 0.02 ]\n [ 0.07455772 -0.14855586 0.02 ]]",
"desired_goal": "[[-0.14843555 0.08192477 0.10310993]\n [ 0.02537175 0.03455522 0.02 ]\n [-0.08025345 -0.09628624 0.13010637]\n [ 0.01315025 0.0477445 0.12314729]]",
"observation": "[[ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -1.38515785e-01\n -1.06750615e-01 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -1.31126970e-01\n -7.30450302e-02 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -8.20183083e-02\n 1.00139044e-02 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 7.45577216e-02\n -1.48555860e-01 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]]"
},
"_episode_num": 0,
"use_sde": false,
"sde_sample_freq": -1,
"_current_progress_remaining": 0.0,
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEkAAAAAAACMAWyUSzKMAXSUR0CnsKzyJ9ApdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsruxSpBHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsmvPTodNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnskKV6eGxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnshtnf2sadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns8uF6AvtdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns3x2KVIJdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns07tqpLmdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsyxRdhRZdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntNr7wazedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntItb9qDcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntGBbfP5YdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntDfICEHudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntfGetjkNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntaFUADJVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntXnyEtdzdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntVH5SFXadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntvfhl18tdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntqd5prULdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntn1n/T9bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntlYaP0ZndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuAposZpBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt7uVX3g2dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt5ZM10kodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt25m7J4jdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuROjZcs2dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuMQtjCpFdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuJ1/tpmFdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuHlt0mtydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuiME7nxKdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnudJHAh0RdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnuaht1p0wdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuYA1vVEvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnuyd5yEL6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnutgQYk3TdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuqzxG2CvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuoVLi++NdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvDDDTBqLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu+Hck+otdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu7jUmUnpdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu5BYFJQMdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvTLOqvNedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvOKZUkv9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvLWwu/UOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvI1wYLssdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvjfTkQwsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvehDPWxydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvcTYNAkcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvZ3buc+adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv09+w1R+dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvwGeMAFQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvtkYwZfldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvrGb1AZ9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwGyhJyyVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwBwgs9SudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv/JS75EddX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv8na37UHdX2UKGgGR8AyAAAAAAAAaAdLE2gIR0CnwIHiFTNudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwXbcoH9ndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwQHC4z7/dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwNo0ygwodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwZn27FsIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwpUoKD02dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwhrSNOuadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwfRD1GsndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnwq0EovzwdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnw6FNL128dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwyebVjI8dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwwM0P6KtdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnwwjtoi9qdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnw7pkPMB7dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxKvJA+pwdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxC9/z8P4dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxAvVd5Y6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxLwNsnAqdX2UKGgGR8A2AAAAAAAAaAdLF2gIR0CnxIvL5h0AdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxbqyGBWgdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxUO6ErXldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxdXt8eCDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxaGQbMoudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxuuUdJardX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CnxvPZyuIRdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnxvsx46fbdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxpO2AoXsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnx0TRhMJydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnx0YlyBCldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyK
ZJ9RaYdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CnyK5VwPy1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyEEdmxt6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyO7gsK9gdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyQM4LkS3dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnynZ1/2CedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyhdBBzFNdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnyh/OMVDbdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnysrdepn6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyuZZ0SyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzFmVRk3CdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cny/nBUJfIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzJhrFfiQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzGjgydnTdWUu"
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
"_n_updates": 50000,
"n_steps": 5,
"gamma": 0.99,
"gae_lambda": 1.0,
"ent_coef": 0.0,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"normalize_advantage": false,
"observation_space": {
":type:": "<class 'gymnasium.spaces.dict.Dict'>",
":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=",
"spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])",
"_shape": null,
"dtype": null,
"_np_random": null
},
"action_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=",
"dtype": "float32",
"bounded_below": "[ True True True True]",
"bounded_above": "[ True True True True]",
"_shape": [
4
],
"low": "[-1. -1. -1. -1.]",
"high": "[1. 1. 1. 1.]",
"low_repr": "-1.0",
"high_repr": "1.0",
"_np_random": null
},
"n_envs": 4,
"lr_schedule": {
":type:": "<class 'function'>",
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
}
}
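The hyperparameters stored in the `data` file above (4 environments, `n_steps=5`, `gamma=0.99`, `gae_lambda=1.0`, `ent_coef=0.0`, `vf_coef=0.5`, `max_grad_norm=0.5`, `learning_rate=0.0007`, RMSprop, 1,000,000 timesteps) are consistent with a training setup along the lines of the sketch below. The actual training script is not part of this commit; the `panda_gym` import and the `VecNormalize` wrapping are assumptions, the latter suggested by the committed `vec_normalize.pkl`. The recorded RMSprop settings (`alpha=0.99`, `eps=1e-05`, `weight_decay=0`) are the stable-baselines3 A2C defaults, so they do not need to be passed explicitly.

```python
import gymnasium as gym
import panda_gym  # assumed dependency; registers PandaPickAndPlace-v3
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

# Four parallel environments with normalization, matching n_envs=4 and vec_normalize.pkl.
env = make_vec_env("PandaPickAndPlace-v3", n_envs=4)
env = VecNormalize(env, norm_obs=True, norm_reward=True)

# Hyperparameters mirror the values recorded in a2c-PandaPickAndPlace-v3/data
# (these are also the stable-baselines3 A2C defaults).
model = A2C(
    "MultiInputPolicy",
    env,
    learning_rate=0.0007,
    n_steps=5,
    gamma=0.99,
    gae_lambda=1.0,
    ent_coef=0.0,
    vf_coef=0.5,
    max_grad_norm=0.5,
    verbose=1,
)
model.learn(total_timesteps=1_000_000)

model.save("a2c-PandaPickAndPlace-v3")
env.save("vec_normalize.pkl")
```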
a2c-PandaPickAndPlace-v3/policy.optimizer.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cab303db709afbbb98df85a90cd541c9904bb5cc2d0609fa445d1df7983311cf
size 52079
a2c-PandaPickAndPlace-v3/policy.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c96fb0ec9d42d22cc3e709d2506321a5bbdf5e365c957c5bf9628c0a954aef6e
size 53359
a2c-PandaPickAndPlace-v3/pytorch_variables.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c35cea3b2e60fb5e7e162d3592df775cd400e575a31c72f359fb9e654ab00c5
size 864
a2c-PandaPickAndPlace-v3/system_info.txt
ADDED
@@ -0,0 +1,9 @@
- OS: Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023
- Python: 3.10.12
- Stable-Baselines3: 2.1.0
- PyTorch: 2.1.0+cu118
- GPU Enabled: True
- Numpy: 1.23.5
- Cloudpickle: 2.2.1
- Gymnasium: 0.29.1
- OpenAI Gym: 0.25.2
config.json
ADDED
@@ -0,0 +1 @@
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x797e5fed43a0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x797e5fecd580>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1697999708116909131, "learning_rate": 0.0007, "tensorboard_log": null, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAZ/C8PxRLh7/lsvY9fQpmP4D6Rj7lsvY9pA5cvKwTYT9ds/Y9mGGbv6w9fL9fqfY9lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAABNxhPxf/n7/9iwQ/xqKrvhS3iD8/Xoq/XQ7cP6yurr6bYmo/YxRWP61yoL+lAK4/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAAAi+OS8Pxy9P9FxX7+w03o99gMLuiMBRz1/6O2+Z/C8PxRLh7/lsvY9zSqUOiB2gbxRDei8x95iPZmvKr2so2I9exBpPAnWe7rfWaM6u+nKPv8CqL+ku2s/zT2jPYT4gD0Ptn8+FPntvn0KZj+A+kY+5bL2PcUqlDohdoG8KRHrvNveYj2Oryq9raNiPdIPaTzwwHu6UVajOlBnhb7Y3ss/jIJfv83Wuz12Be68JLY5PRHn7b6kDly8rBNhP12z9j2Y+oc6B+uCvJU387xoxWI9FKMrvRZAZD3d4XM8U3vgupaZdToKKi8+EjV+PrlSZr/P5Go+KfaqPiHTC79L8OS+mGGbv6w9fL9fqfY97kaqOmU+gbycEe+8ODZjPaWALL0Gb2M9tHGDPN46tTlQbos6lGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[ 1.4760865 -1.0569787 0.1204584 ]\n [ 0.89859754 0.19431496 0.1204584 ]\n [-0.01343122 0.8792064 0.1204593 ]\n [-1.2139158 -0.98531604 0.12044024]]", "desired_goal": "[[ 0.8822634 -1.2499722 0.51776105]\n [-0.33522624 1.0680871 -1.0810012 ]\n [ 1.7191883 -0.3411764 0.9155671 ]\n [ 0.8362486 -1.2534996 1.3593947 ]]", "observation": "[[-2.79503502e-02 1.47742450e+00 -8.72830451e-01 6.12370372e-02\n -5.30301942e-04 4.85850684e-02 -4.64664429e-01 1.47608650e+00\n -1.05697870e+00 1.20458402e-01 1.13042595e-03 -1.58033967e-02\n -2.83266623e-02 5.53882383e-02 -4.16713692e-02 5.53318709e-02\n 1.42251207e-02 -9.60678386e-04 1.24626968e-03]\n [ 3.96314472e-01 -1.31259143e+00 9.20831919e-01 7.97077194e-02\n 6.29740059e-02 2.49717936e-01 -4.64790940e-01 8.98597538e-01\n 1.94314957e-01 1.20458402e-01 1.13042502e-03 -1.58033986e-02\n -2.86947060e-02 5.53883128e-02 -4.16713282e-02 5.53318746e-02\n 1.42249633e-02 -9.60364006e-04 1.24616374e-03]\n [-2.60553837e-01 1.59273815e+00 -8.73085737e-01 9.17182937e-02\n -2.90553384e-02 4.53397185e-02 -4.64653522e-01 -1.34312250e-02\n 8.79206419e-01 1.20459296e-01 1.03743654e-03 -1.59812104e-02\n -2.96895895e-02 5.53640425e-02 -4.19035703e-02 5.57251796e-02\n 1.48853930e-02 -1.71265972e-03 9.36889439e-04]\n [ 1.71058804e-01 2.48249322e-01 -8.99699748e-01 2.29388461e-01\n 3.33909303e-01 -5.46190321e-01 -4.47145790e-01 -1.21391582e+00\n -9.85316038e-01 1.20440237e-01 1.29911094e-03 -1.57768223e-02\n -2.91832015e-02 5.54716289e-02 -4.21148725e-02 5.55258021e-02\n 1.60454288e-02 3.45668697e-04 1.06377341e-03]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWViwIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAFdcNvhGg2r0K16M8JkYGvqKYlb0K16M8N/mnvVwRJDwK16M8uLGYPQcfGL4K16M8lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAff8Xvi3Ipz1NK9M9a9jPPMaJDT0K16M87Fukvbkxxb2bOgU+KHRXPL2PQz2lNPw9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWMAEAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAFdcNvhGg2r0K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA6nIdPRlsGqxDI0o+AAAAAAAAAIAAAAAAAAAAACZGBr6imJW9CtejPAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOpyHT0ZbBqsQyNKPgAAAAAAAACAAAAAAAAAAAA3+ae9XBEkPArXozwAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAAAAAAAAuLGYPQcfGL4K16M8AAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlGgOSwRLE4aUaBJ0lFKUdS4=", "achieved_goal": "[[-0.13851579 -0.10675061 0.02 ]\n [-0.13112697 -0.07304503 0.02 ]\n [-0.08201831 0.0100139 0.02 ]\n [ 0.07455772 -0.14855586 0.02 ]]", "desired_goal": "[[-0.14843555 0.08192477 0.10310993]\n [ 0.02537175 0.03455522 0.02 ]\n [-0.08025345 -0.09628624 0.13010637]\n [ 0.01315025 0.0477445 0.12314729]]", "observation": "[[ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -1.38515785e-01\n -1.06750615e-01 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -1.31126970e-01\n -7.30450302e-02 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 -8.20183083e-02\n 1.00139044e-02 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]\n [ 3.84396687e-02 -2.19447225e-12 1.97400138e-01 0.00000000e+00\n -0.00000000e+00 0.00000000e+00 0.00000000e+00 7.45577216e-02\n -1.48555860e-01 1.99999996e-02 0.00000000e+00 -0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n 0.00000000e+00 0.00000000e+00 0.00000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHwEkAAAAAAACMAWyUSzKMAXSUR0CnsKzyJ9ApdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsruxSpBHdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsmvPTodNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnskKV6eGxdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnshtnf2sadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns8uF6AvtdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns3x2KVIJdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cns07tqpLmdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnsyxRdhRZdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntNr7wazedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntItb9qDcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntGBbfP5YdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntDfICEHudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntfGetjkNdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntaFUADJVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntXnyEtdzdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntVH5SFXadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntvfhl18tdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntqd5prULdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cntn1n/T9bdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CntlYaP0ZndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuAposZpBdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt7uVX3g2dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt5ZM10kodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnt25m7J4jdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuROjZcs2dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuMQtjCpFdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuJ1/tpmFdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuHlt0mtydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuiME7nxKdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnudJHAh0RdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnuaht1p0wdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuYA1vVEvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnuyd5yEL6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnutgQYk3TdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuqzxG2CvdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnuoVLi++NdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvDDDTBqLdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu+Hck+otdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu7jUmUnpdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnu5BYFJQMdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvTLOqvNedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvOKZUkv9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvLWwu/UOdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvI1wYLssdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvjfTkQwsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvehDPWxydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvcTYNAkcdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvZ3buc+adX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv09+w1R+dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvwGeMAFQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvtkYwZfldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnvrGb1AZ9dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwGyhJyyVdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwBwgs9SudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv/JS75EddX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnv8na37UHdX2UKGgGR8AyAAAAAAAAaAdLE2gIR0CnwIHiFTNudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwXbcoH9ndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwQHC4z7/dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwNo0ygwodX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwZn27FsIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwpUoKD02dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwhrSNOuadX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwfRD1GsndX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnwq0EovzwdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnw6FNL128dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwyebVjI8dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnwwM0P6KtdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnwwjtoi9qdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnw7pkPMB7dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxKvJA+pwdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxC9/z8P4dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxAvVd5Y6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxLwNsnAqdX2UKGgGR8A2AAAAAAAAaAdLF2gIR0CnxIvL5h0AdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxbqyGBWgdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxUO6ErXldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxdXt8eCDdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxaGQbMoudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxuuUdJardX2UKGgGRwAAAAAAAAAAaAdLAWgIR0CnxvPZyuIRdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnxvsx46fbdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnxpO2AoXsdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnx0TRhMJydX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnx0YlyBCldX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyKZJ9RaYdX2UKGgGRw
AAAAAAAAAAaAdLAWgIR0CnyK5VwPy1dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyEEdmxt6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyO7gsK9gdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyQM4LkS3dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnynZ1/2CedX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyhdBBzFNdX2UKGgGRwAAAAAAAAAAaAdLAWgIR0Cnyh/OMVDbdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cnysrdepn6dX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnyuZZ0SyudX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzFmVRk3CdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0Cny/nBUJfIdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzJhrFfiQdX2UKGgGR8BJAAAAAAAAaAdLMmgIR0CnzGjgydnTdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": "gAWVMgQAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWEwAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBlGggSxOFlGgkdJRSlGgnaBwolhMAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAZRoIEsThZRoJHSUUpRoLEsThZRoLmgcKJZMAAAAAAAAAAAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLE4WUaCR0lFKUaDNoHCiWTAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBAAAgQQAAIEEAACBBlGgWSxOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YnVoLE5oEE5oPE51Yi4=", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (19,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVpwEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBAAAAAAAAAABAQEBlGgIjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKUjA1ib3VuZGVkX2Fib3ZllGgRKJYEAAAAAAAAAAEBAQGUaBVLBIWUaBl0lFKUjAZfc2hhcGWUSwSFlIwDbG93lGgRKJYQAAAAAAAAAAAAgL8AAIC/AACAvwAAgL+UaAtLBIWUaBl0lFKUjARoaWdolGgRKJYQAAAAAAAAAAAAgD8AAIA/AACAPwAAgD+UaAtLBIWUaBl0lFKUjAhsb3dfcmVwcpSMBC0xLjCUjAloaWdoX3JlcHKUjAMxLjCUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True]", "bounded_above": "[ True True True True]", "_shape": [4], "low": "[-1. -1. -1. -1.]", "high": "[1. 1. 1. 
1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": null}, "n_envs": 4, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.1.0", "PyTorch": "2.1.0+cu118", "GPU Enabled": "True", "Numpy": "1.23.5", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.1", "OpenAI Gym": "0.25.2"}}
replay.mp4
ADDED
Binary file (907 kB)
results.json
ADDED
@@ -0,0 +1 @@
{"mean_reward": -50.0, "std_reward": 0.0, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-10-22T19:26:17.911289"}
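`results.json` records a deterministic 10-episode evaluation with `mean_reward` -50.00 +/- 0.00, which for this sparse-reward task suggests the agent does not solve the pick-and-place within the episode limit. A minimal sketch of how such a figure is typically computed with stable-baselines3's `evaluate_policy`, assuming `model` and the `VecNormalize`-wrapped `env` from the usage sketch after the README above:

```python
from stable_baselines3.common.evaluation import evaluate_policy

# `model` and `env` as constructed in the usage sketch above (assumption).
mean_reward, std_reward = evaluate_policy(
    model, env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```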
vec_normalize.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c2a3764b1b027b088127a62a85f492ddb8dfda9ce5661b77b1e82a3615a91e02
size 3023