| Option | Value |
| --- | --- |
| base_dim | 512 |
| batch_size | 64 |
| beta_schedule | linear |
| checkpoints_dir | ./checkpoints |
| clip_grad_norm | 1 |
| cond_mask_prob | 0.1 |
| continue_ckpt | latest.tar |
| dataset_name | t2m |
| debug | False |
| decay_rate | 0.9 |
| diffusion_steps | 1000 |
| dim_mults | [2, 2, 2, 2] |
| dropout | 0.1 |
| feat_bias | 5 |
| is_continue | False |
| latent_dim | 512 |
| log_every | 500 |
| lr | 0.0001 |
| model_ema | True |
| model_ema_decay | 0.9999 |
| model_ema_steps | 32 |
| name | self_attn-fulllayer-ffn-drop0_1-lr1e4 |
| no_adagn | False |
| no_eff | True |
| num_layers | 8 |
| num_train_steps | 50000 |
| prediction_type | sample |
| save_interval | 10000 |
| seed | 0 |
| self_attention | True |
| text_latent_dim | 256 |
| time_dim | 512 |
| update_lr_steps | 5000 |
| vis_attn | False |
| weight_decay | 0.01 |
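`beta_schedule: linear` with `diffusion_steps: 1000` describes the forward-noising variance schedule, and `prediction_type: sample` typically means the network regresses the clean sample x0 rather than the added noise. A minimal sketch of the linear schedule is below; the endpoints 1e-4 and 0.02 are the common DDPM defaults and are an assumption here, not taken from this config:

```python
import torch

# beta_schedule: linear over diffusion_steps: 1000.
# Endpoints 1e-4 / 0.02 are assumed DDPM defaults, not values from this config.
betas = torch.linspace(1e-4, 0.02, 1000)
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)  # cumulative alpha-bar_t of the forward process
```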
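`cond_mask_prob: 0.1` is the usual knob for classifier-free guidance: during training the text condition is dropped for roughly 10% of samples so the model also learns an unconditional path. A hypothetical masking helper in that spirit (the function name and tensor layout are assumptions, not this repo's API):

```python
import torch

def mask_cond(cond: torch.Tensor, cond_mask_prob: float = 0.1, training: bool = True) -> torch.Tensor:
    """Zero out the condition for a random subset of the batch (assumed CFG-style masking)."""
    if training and cond_mask_prob > 0.0:
        # 1 -> drop this sample's condition; broadcast the keep-mask over feature dims
        drop = torch.bernoulli(torch.full((cond.shape[0],), cond_mask_prob, device=cond.device))
        cond = cond * (1.0 - drop).view(-1, *([1] * (cond.dim() - 1)))
    return cond

text_emb = torch.randn(64, 256)  # batch_size: 64, text_latent_dim: 256
masked = mask_cond(text_emb, cond_mask_prob=0.1)
```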
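The optimizer-related options (`lr`, `weight_decay`, `decay_rate`, `update_lr_steps`, `clip_grad_norm`, `num_train_steps`) map onto a standard PyTorch loop as sketched below; the choice of AdamW and the `model` placeholder are assumptions, not confirmed by the config:

```python
import torch

model = torch.nn.Linear(512, 512)  # placeholder for the actual denoiser network

# lr: 0.0001, weight_decay: 0.01 (AdamW assumed)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4, weight_decay=0.01)

# decay_rate: 0.9 applied every update_lr_steps: 5000 steps
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5000, gamma=0.9)

for step in range(50_000):  # num_train_steps: 50000
    loss = model(torch.randn(64, 512)).pow(2).mean()  # batch_size: 64; dummy loss
    optimizer.zero_grad()
    loss.backward()
    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)  # clip_grad_norm: 1
    optimizer.step()
    scheduler.step()
```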
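Finally, `model_ema: True` with `model_ema_decay: 0.9999` and `model_ema_steps: 32` suggests an exponential moving average of the weights refreshed every 32 optimizer steps. A minimal sketch of such an update (parameters only, buffers ignored; this is not the repository's implementation):

```python
import copy
import torch

@torch.no_grad()
def update_ema(ema_model: torch.nn.Module, model: torch.nn.Module, decay: float = 0.9999) -> None:
    """In-place EMA update: p_ema <- decay * p_ema + (1 - decay) * p."""
    for p_ema, p in zip(ema_model.parameters(), model.parameters()):
        p_ema.mul_(decay).add_(p, alpha=1.0 - decay)

model = torch.nn.Linear(512, 512)        # placeholder network
ema_model = copy.deepcopy(model).eval()  # frozen EMA copy used for evaluation

for step in range(1, 1001):
    # ... one optimizer step on `model` would go here ...
    if step % 32 == 0:  # model_ema_steps: 32
        update_ema(ema_model, model, decay=0.9999)  # model_ema_decay: 0.9999
```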