{
  "data_path": "data/enwik8/enwik8_text_document",
  "vocab_file": "data/gpt2-vocab.json",
  "merge_file": "data/gpt2-merges.txt",

  "lr_decay_iters": 20,
  "train_iters": 20,

  "hostfile": "None",
  "include": "localhost:1",
  "use_wandb": false,

  "pipe_parallel_size": 1,
  "model_parallel_size": 1,

  "num_layers": 2,
  "hidden_size": 8,
  "num_attention_heads": 4,
  "seq_length": 1024,
  "max_position_embeddings": 1024,
  "pos_emb": "rotary",
  "no_weight_tying": true,
  "gpt_j_residual": false,
  "output_layer_parallelism": "column",

  "scaled_upper_triang_masked_softmax_fusion": false,
  "bias_gelu_fusion": false,
  "rope_fusion": false,
  "layernorm_fusion": false,

  "optimizer": {
    "type": "sm3",
    "params": {}
  },

  "precision": "fp16",

  "init_method": "small_init",
  "output_layer_init_method": "wang_init",

  "train_micro_batch_size_per_gpu": 4,
  "gradient_accumulation_steps": 1,
  "data_impl": "mmap",
  "num_workers": 1,

  "checkpoint_activations": true,
  "checkpoint_num_layers": 1,
  "partition_activations": true,
  "synchronize_each_layer": true,

  "gradient_clipping": 1.0,
  "weight_decay": 0.1,
  "hidden_dropout": 0,
  "attention_dropout": 0,

  "distributed_backend": "nccl",
  "lr_decay_style": "cosine",
  "warmup": 0.01,
  "checkpoint_factor": 1000,
  "eval_interval": 100000,
  "eval_iters": 10,

  "log_interval": 10,
  "steps_per_print": 10,
  "wall_clock_breakdown": true,

  "deepspeed_extra_args": {
    "comms_logger": {
      "enabled": true,
      "verbose": true,
      "prof_all": true,
      "debug": false
    }
  }
}