{
  "act_dim": 6,
  "action_tanh": true,
  "activation_function": "relu",
  "adv_act_dim": 6,
  "architectures": [
    "SimpleRobustDT"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "context_size": 20,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "hidden_size": 128,
  "initializer_range": 0.02,
  "lambda1": 1.0,
  "lambda2": 10.0,
  "layer_norm_epsilon": 1e-05,
  "log_interval_steps": 100,
  "max_ep_len": 1000,
  "max_ep_return": 2000.0,
  "max_obs_len": 999,
  "max_obs_return": 1780.811459379,
  "min_obs_return": -1069.52315951672,
  "model_type": "decision_transformer",
  "n_head": 1,
  "n_inner": null,
  "n_layer": 3,
  "n_positions": 1024,
  "pr_act_dim": 6,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "returns_scale": 1000,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "state_dim": 17,
  "state_mean": [
    -0.12111923038336422,
    0.25327425155418065,
    -0.07035047587677959,
    -0.08115773692056283,
    0.033850979115650645,
    0.06298736361412782,
    -0.12843807016838418,
    -0.031447396928089406,
    1.3713138326103653,
    -0.024560892924977706,
    -0.006480418987622041,
    0.001549001309508945,
    -0.07614348509836881,
    0.1161295094948307,
    -0.03387701949143711,
    0.05022231275001467,
    0.06985786476000175
  ],
  "state_std": [
    0.14385576355954735,
    0.7782796689571053,
    0.39214212048836883,
    0.33540646757015297,
    0.3667258401328152,
    0.42185300591286856,
    0.42122407941439594,
    0.38196441128485303,
    0.894889347650139,
    0.9790099847764456,
    1.6209753212087943,
    5.481261290311935,
    5.412253453556258,
    5.821229850387846,
    5.340453971568788,
    6.390796838411146,
    5.755205744270759
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.31.0",
  "use_cache": true,
  "vocab_size": 1,
  "warmup_steps": 1000
}
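
Below is a minimal sketch (an assumption, not the authors' released code) of how a config like this could be consumed with Hugging Face transformers. SimpleRobustDT is a custom architecture, so only the standard DecisionTransformerConfig fields map directly; the extra keys (lambda1, lambda2, adv_act_dim, state_mean, returns_scale, ...) are simply retained as attributes on the loaded config object. The checkpoint path is a placeholder, and the state normalization and return scaling shown follow the usual Decision Transformer convention rather than anything stated in the config itself.

import numpy as np
from transformers import DecisionTransformerConfig

# Load the config; keys unknown to DecisionTransformerConfig
# (lambda1, lambda2, adv_act_dim, returns_scale, ...) are kept as plain attributes.
config = DecisionTransformerConfig.from_pretrained("path/to/simple-robust-dt")  # placeholder path

# Assumed preprocessing, following standard Decision Transformer practice:
# z-score observations with the stored statistics and divide returns-to-go
# by returns_scale.
state_mean = np.asarray(config.state_mean)   # length 17, matches state_dim
state_std = np.asarray(config.state_std)

observation = np.zeros(config.state_dim)     # dummy observation for illustration
normalized = (observation - state_mean) / state_std

target_return = config.max_ep_return / config.returns_scale  # 2000.0 / 1000 = 2.0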