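#!/bin/bash
# Launches distributed fine-tuning with torchrun: one worker process per GPU
# listed in TOT_CUDA, running finetune.py on DATA_PATH with MODEL_PATH as the
# base model and writing checkpoints to OUTPUT_PATH.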
TOT_CUDA="0,1"             # comma-separated list of GPU ids to use
CUDAs=(${TOT_CUDA//,/ })   # split the id list into a bash array
CUDA_NUM=${#CUDAs[@]}      # number of GPUs = number of torchrun workers
# PORT="12345"
# ../Chinese-Vicuna/sample/instruct/chat_data.jsonl
#DATA_PATH="sample/instruct/legislation2.json" #"../dataset/instruction/guanaco_non_chat_mini_52K-utf8.json" #"./sample/merge_sample.json"
#DATA_PATH="./sample/instruct/chat_data.jsonl"
DATA_PATH="../Chinese-Vicuna/sample/legislation60k.jsonl"
#DATA_PATH="../Chinese-Vicuna/sample/instructchat_data.jsonl" # working
OUTPUT_PATH="../llama2-62kjudgement-20sept"
MODEL_PATH="../chinese-llama-2-13b"
# lora_checkpoint="../Llama2-Chinese-13b-Chat-LoRA"
from_data_beginning=True   # note: defined here but not passed to finetune.py below
TEST_SIZE=300              # number of examples held out for evaluation
#CUDA_VISIBLE_DEVICES=0 python finetune.py \
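# The commented line above is a single-GPU alternative. The torchrun command
# below starts $CUDA_NUM workers on this node; TORCH_DISTRIBUTED_DEBUG=DETAIL
# enables extra torch.distributed logging and consistency checks.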
TORCH_DISTRIBUTED_DEBUG=DETAIL CUDA_VISIBLE_DEVICES=${TOT_CUDA} torchrun --standalone --nnodes=1 --nproc_per_node=$CUDA_NUM finetune.py \
--data_path $DATA_PATH \
--output_path $OUTPUT_PATH \
--model_path $MODEL_PATH \
--eval_steps 200 \
--save_steps 200 \
--test_size $TEST_SIZE
# --resume_from_checkpoint $lora_checkpoint
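# To resume from an existing LoRA checkpoint: uncomment the lora_checkpoint
# variable above, put a trailing "\" back on the --test_size line, and
# uncomment the --resume_from_checkpoint flag (this assumes finetune.py
# accepts that argument, as the commented-out flag suggests).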