ppo-seals-Walker2d-v1 / config.yml
!!python/object/apply:collections.OrderedDict
- - - batch_size
    - 8
  - - clip_range
    - 0.4
  - - ent_coef
    - 0.00013057334805552262
  - - gae_lambda
    - 0.92
  - - gamma
    - 0.98
  - - learning_rate
    - 3.791707778339674e-05
  - - max_grad_norm
    - 0.6
  - - n_envs
    - 1
  - - n_epochs
    - 5
  - - n_steps
    - 2048
  - - n_timesteps
    - 1000000.0
  - - normalize
    - gamma: 0.98
      norm_obs: false
      norm_reward: true
  - - policy
    - MlpPolicy
  - - policy_kwargs
    - activation_fn: !!python/name:torch.nn.modules.activation.ReLU ''
      features_extractor_class: !!python/name:imitation.policies.base.NormalizeFeaturesExtractor ''
      net_arch:
      - pi:
        - 256
        - 256
        vf:
        - 256
        - 256
  - - vf_coef
    - 0.6167177795726859
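
A minimal sketch of how these hyperparameters could be wired into a stable-baselines3 PPO learner. The environment id "seals/Walker2d-v1", the gymnasium import, and the wrapper choices are assumptions inferred from the model name and the normalize block, not taken from this file; the config itself is loaded by the zoo/imitation tooling rather than by hand like this.

import gymnasium as gym
import torch
from stable_baselines3 import PPO
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize
from imitation.policies.base import NormalizeFeaturesExtractor

# n_envs: 1; env id assumed from the model name "ppo-seals-Walker2d-v1".
venv = DummyVecEnv([lambda: gym.make("seals/Walker2d-v1")])
# "normalize" block: reward normalization only, discount gamma 0.98.
venv = VecNormalize(venv, norm_obs=False, norm_reward=True, gamma=0.98)

model = PPO(
    "MlpPolicy",
    venv,
    batch_size=8,
    clip_range=0.4,
    ent_coef=0.00013057334805552262,
    gae_lambda=0.92,
    gamma=0.98,
    learning_rate=3.791707778339674e-05,
    max_grad_norm=0.6,
    n_epochs=5,
    n_steps=2048,
    vf_coef=0.6167177795726859,
    policy_kwargs=dict(
        activation_fn=torch.nn.ReLU,
        features_extractor_class=NormalizeFeaturesExtractor,
        # Mirrors the net_arch block: separate 256-256 heads for pi and vf.
        net_arch=dict(pi=[256, 256], vf=[256, 256]),
    ),
)
model.learn(total_timesteps=1_000_000)  # n_timesteps: 1000000.0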