{
    "action_chunk_size": 5,
    "bet_softmax_temperature": 0.1,
    "crop_is_random": true,
    "crop_shape": [
        84,
        84
    ],
    "dropout": 0.1,
    "gpt_block_size": 500,
    "gpt_hidden_dim": 512,
    "gpt_input_dim": 512,
    "gpt_n_head": 8,
    "gpt_n_layer": 8,
    "gpt_output_dim": 512,
    "input_normalization_modes": {
        "observation.images.elp0": "mean_std",
        "observation.state": "mean_std"
    },
    "input_shapes": {
        "observation.images.elp0": [
            3,
            600,
            800
        ],
        "observation.state": [
            6
        ]
    },
    "mlp_hidden_dim": 1024,
    "n_action_pred_token": 7,
    "n_obs_steps": 5,
    "n_vqvae_training_steps": 20000,
    "offset_loss_weight": 10000.0,
    "output_normalization_modes": {
        "action": "min_max"
    },
    "output_shapes": {
        "action": [
            6
        ]
    },
    "pretrained_backbone_weights": null,
    "primary_code_loss_weight": 5.0,
    "secondary_code_loss_weight": 0.5,
    "sequentially_select": false,
    "spatial_softmax_num_keypoints": 32,
    "use_group_norm": true,
    "vision_backbone": "resnet18",
    "vqvae_embedding_dim": 256,
    "vqvae_enc_hidden_dim": 128,
    "vqvae_n_embed": 16
}
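
Because the configuration is plain JSON, it can be loaded and sanity-checked with nothing but the standard library before any policy is built. The sketch below is illustrative only: the file name "config.json" is an assumption, and the checks simply mirror the shapes declared above (a 3x600x800 camera image, a 6-dimensional state, a 6-dimensional action, and an 84x84 crop) rather than any framework-specific loading logic.

import json

# Load the configuration listed above (file path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# Shapes declared in the config.
image_shape = cfg["input_shapes"]["observation.images.elp0"]   # [3, 600, 800]
state_dim = cfg["input_shapes"]["observation.state"][0]        # 6
action_dim = cfg["output_shapes"]["action"][0]                 # 6
crop_h, crop_w = cfg["crop_shape"]                              # 84, 84

# Illustrative consistency checks, not part of any library API.
assert len(image_shape) == 3 and image_shape[0] == 3, "expected a CHW RGB image"
assert state_dim == action_dim == 6, "state and action are both 6-dimensional here"
assert crop_h <= image_shape[1] and crop_w <= image_shape[2], "crop must fit inside the raw image"

print(f"image {image_shape}, state dim {state_dim}, action dim {action_dim}, "
      f"crop {crop_h}x{crop_w}, backbone {cfg['vision_backbone']}")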