Update train_model.sh
train_model.sh  +0 -1
@@ -19,4 +19,3 @@ torchrun --nproc_per_node=1 --master_port=9292 train.py \
     --logging_steps 1 \
     --fsdp "full_shard auto_wrap" \
     --fsdp_transformer_layer_cls_to_wrap 'LLaMADecoderLayer' \
-    --tf32 True \
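
For reference, below is a sketch of the affected region of train_model.sh after this commit. Only lines that appear in the hunk are shown; the arguments before --logging_steps and any lines after the final backslash are assumed to belong to the same torchrun command and are left elided. With --tf32 True removed, TF32 matrix math is no longer explicitly enabled via the training arguments and presumably stays at the framework default.

# Sketch of the affected region of train_model.sh after this commit.
# Surrounding arguments are part of the same torchrun command but are
# not included in the hunk, so they are not shown here.
torchrun --nproc_per_node=1 --master_port=9292 train.py \
    --logging_steps 1 \
    --fsdp "full_shard auto_wrap" \
    --fsdp_transformer_layer_cls_to_wrap 'LLaMADecoderLayer' \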