!!python/object/apply:collections.OrderedDict
- - - batch_size
    - 128
  - - buffer_size
    - 800000
  - - env_wrapper
    - - stable_baselines3.common.atari_wrappers.AtariWrapper
  - - exploration_final_eps
    - 0.01
  - - exploration_fraction
    - 0.1
  - - frame_stack
    - 4
  - - gradient_steps
    - 1
  - - learning_rate
    - 0.00027
  - - learning_starts
    - 100000
  - - n_timesteps
    - 2500000.0
  - - optimize_memory_usage
    - false
  - - policy
    - CnnPolicy
  - - target_update_interval
    - 1000
  - - train_freq
    - 4
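# The entries above are DQN hyperparameters for an Atari run, stored in the
# OrderedDict dump format used by rl-baselines3-zoo-style training scripts.
# A minimal sketch of how such a file could be consumed is commented below;
# it assumes PyYAML and stable-baselines3 are installed, and the env id
# "PongNoFrameskip-v4" is only a placeholder, not taken from this file.
#
#   import yaml
#   from stable_baselines3 import DQN
#   from stable_baselines3.common.env_util import make_atari_env
#   from stable_baselines3.common.vec_env import VecFrameStack
#
#   # The !!python/object/apply tag requires PyYAML's unsafe loader.
#   with open("config.yml") as f:
#       hparams = dict(yaml.load(f, Loader=yaml.UnsafeLoader))
#
#   # Keys that are handled outside the DQN constructor.
#   n_timesteps = int(hparams.pop("n_timesteps"))
#   frame_stack = hparams.pop("frame_stack")
#   hparams.pop("env_wrapper")  # make_atari_env already applies AtariWrapper
#   policy = hparams.pop("policy")
#
#   env = make_atari_env("PongNoFrameskip-v4", n_envs=1, seed=0)  # placeholder env id
#   env = VecFrameStack(env, n_stack=frame_stack)
#
#   model = DQN(policy, env, verbose=1, **hparams)
#   model.learn(total_timesteps=n_timesteps)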