Disty0 committed on
Commit
561c3da
1 Parent(s): 8285dec

Upload 8 files

Browse files
dataset/base_tag_count.txt ADDED
The diff for this file is too large to render. See raw diff
 
dataset/base_word_count.txt ADDED
The diff for this file is too large to render. See raw diff
 
dataset/kohya-command-base.sh ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/sh
# Launch one sub-epoch of Stage-C (Stable Cascade) training for
# sotediffusion-wr3_3b, resuming from the state saved by the previous run.
#
# Usage: kohya-command-base.sh CURRENT CURRENT_SUB PAST PAST_SUB
#   CURRENT / CURRENT_SUB - epoch number and sub-dataset index to train now
#   PAST    / PAST_SUB    - epoch number and sub-dataset index whose
#                           checkpoint/state this run resumes from
#
# Exit early on any command failure or use of an unset variable.
set -eu

# Validate the four required positional arguments up front; previously a
# missing argument silently expanded to "" and produced broken paths.
CURRENT=${1:?usage: $0 CURRENT CURRENT_SUB PAST PAST_SUB}
CURRENT_SUB=${2:?missing CURRENT_SUB (arg 2)}
PAST=${3:?missing PAST (arg 3)}
PAST_SUB=${4:?missing PAST_SUB (arg 4)}

OUT_PATH="/home/ubuntu/out/sotediffusion-wr3_3b"
DATASET_PATH="/home/ubuntu/dataset/combined"
PROMPT_PATH="/home/ubuntu/sotediffusion-prompt.txt"
# NOTE(review): "WUER" presumably abbreviates Wuerstchen (the Stable Cascade
# architecture) — confirm; holds the frozen effnet/previewer checkpoints.
WUER_PATH="/home/ubuntu/models"

# All expansions quoted so paths with unexpected characters cannot word-split.
accelerate launch --mixed_precision bf16 --multi_gpu stable_cascade_train_stage_c.py \
  --mixed_precision bf16 \
  --save_precision fp16 \
  --sdpa \
  --ddp_static_graph \
  --train_text_encoder \
  --resolution "1024,1024" \
  --train_batch_size 16 \
  --gradient_checkpointing \
  --learning_rate 1e-5 \
  --learning_rate_te1 4e-6 \
  --lr_scheduler constant_with_warmup \
  --lr_warmup_steps 100 \
  --optimizer_type adafactor \
  --optimizer_args "scale_parameter=False" "relative_step=False" "warmup_init=False" \
  --token_warmup_min 1 \
  --token_warmup_step 0 \
  --shuffle_caption \
  --caption_separator ", " \
  --caption_dropout_rate 0 \
  --caption_tag_dropout_rate 0 \
  --caption_dropout_every_n_epochs 0 \
  --dataset_repeats 1 \
  --save_state \
  --save_every_n_steps 1024 \
  --sample_every_n_steps 512 \
  --max_token_length 225 \
  --max_train_epochs 1 \
  --caption_extension ".txt" \
  --max_data_loader_n_workers 16 \
  --persistent_data_loader_workers \
  --enable_bucket \
  --min_bucket_reso 256 \
  --max_bucket_reso 4096 \
  --bucket_reso_steps 64 \
  --bucket_no_upscale \
  --log_with tensorboard \
  --output_name sotediffusion-wr3_3b \
  --train_data_dir "${DATASET_PATH}-${CURRENT_SUB}" \
  --in_json "${DATASET_PATH}-${CURRENT_SUB}.json" \
  --output_dir "${OUT_PATH}-${CURRENT}/${CURRENT_SUB}" \
  --logging_dir "${OUT_PATH}-${CURRENT}/${CURRENT_SUB}/logs" \
  --resume "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b-state" \
  --stage_c_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b.safetensors" \
  --text_model_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b_text_model.safetensors" \
  --effnet_checkpoint_path "${WUER_PATH}/effnet_encoder.safetensors" \
  --previewer_checkpoint_path "${WUER_PATH}/previewer.safetensors" \
  --sample_prompts "${PROMPT_PATH}"
dataset/kohya-command-final.sh ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/sh
# Final fine-tuning pass for sotediffusion-wr3_3b on the curated
# "newest-best" dataset, starting from the epoch-3/sub-1 checkpoint.
# Unlike kohya-command-base.sh this run freezes the text encoder
# (--learning_rate_te1 0) and trains 16 epochs without --resume.
#
# Exit early on any command failure or use of an unset variable.
set -eu

# BUGFIX(review): the original script had a stray bare line
# "/home/ubuntu/dataset/combined/newest-best" here, which sh tried to
# execute as a command (and failed on every run). Replaced with an
# explicit dataset variable used below.
DATASET_PATH="/home/ubuntu/dataset/newest-best"

# Checkpoint to start from (epoch 3, sub-dataset 1).
PAST=3
PAST_SUB=1

OUT_PATH="/home/ubuntu/out/sotediffusion-wr3_3b"
PROMPT_PATH="/home/ubuntu/sotediffusion-prompt.txt"
# NOTE(review): "WUER" presumably abbreviates Wuerstchen (the Stable Cascade
# architecture) — confirm; holds the frozen effnet/previewer checkpoints.
WUER_PATH="/home/ubuntu/models"

# All expansions quoted so paths with unexpected characters cannot word-split.
accelerate launch --mixed_precision bf16 --multi_gpu stable_cascade_train_stage_c.py \
  --mixed_precision bf16 \
  --save_precision fp16 \
  --sdpa \
  --ddp_static_graph \
  --resolution "1024,1024" \
  --train_batch_size 16 \
  --gradient_checkpointing \
  --learning_rate 4e-6 \
  --learning_rate_te1 0 \
  --lr_scheduler constant_with_warmup \
  --lr_warmup_steps 100 \
  --optimizer_type adafactor \
  --optimizer_args "scale_parameter=False" "relative_step=False" "warmup_init=False" \
  --token_warmup_min 1 \
  --token_warmup_step 0 \
  --shuffle_caption \
  --caption_separator ", " \
  --caption_dropout_rate 0 \
  --caption_tag_dropout_rate 0 \
  --caption_dropout_every_n_epochs 0 \
  --dataset_repeats 1 \
  --save_state \
  --save_every_n_steps 1024 \
  --sample_every_n_steps 512 \
  --max_token_length 225 \
  --max_train_epochs 16 \
  --caption_extension ".txt" \
  --max_data_loader_n_workers 16 \
  --persistent_data_loader_workers \
  --enable_bucket \
  --min_bucket_reso 256 \
  --max_bucket_reso 4096 \
  --bucket_reso_steps 64 \
  --bucket_no_upscale \
  --log_with tensorboard \
  --output_name sotediffusion-wr3_3b \
  --train_data_dir "${DATASET_PATH}" \
  --in_json "${DATASET_PATH}.json" \
  --output_dir /home/ubuntu/out/final \
  --logging_dir /home/ubuntu/out/final/logs \
  --stage_c_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b.safetensors" \
  --text_model_checkpoint_path "${OUT_PATH}-${PAST}/${PAST_SUB}/sotediffusion-wr3_3b_text_model.safetensors" \
  --effnet_checkpoint_path "${WUER_PATH}/effnet_encoder.safetensors" \
  --previewer_checkpoint_path "${WUER_PATH}/previewer.safetensors" \
  --sample_prompts "${PROMPT_PATH}"
dataset/sotediffusion-wr3_3b-base.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0db90cd4a4a737bf699302b9fd19333f27ed802ce787687106fb9971e821f8e4
3
+ size 7178349784
dataset/train.sh ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/bash
# Endless training driver: runs kohya-command-base.sh over sub-datasets
# 0..4 in order, forever, bumping EPOCH after each full sweep. Each run
# resumes from the (PAST, PAST_SUB) checkpoint produced by the previous run.
# First run resumes from epoch 0 / sub 4.
#
# Intentionally loops forever; stop it with Ctrl-C / kill. We use `set -u`
# but not `set -e`, because a failed sub-run should not kill the driver —
# failures are reported to stderr and the loop moves on.
set -u

EPOCH=1
PAST=0
PAST_SUB=4

while true; do
  for i in 0 1 2 3 4; do
    echo "EPOCH: $EPOCH SUB: $i PAST_EPOCH: $PAST PAST_SUB: $PAST_SUB"
    # Don't abort the driver on a failed sub-run; surface it on stderr.
    /home/ubuntu/kohya-command-base.sh "$EPOCH" "$i" "$PAST" "$PAST_SUB" \
      || echo "warning: sub-run epoch=$EPOCH sub=$i exited non-zero" >&2
    sleep 1
    # The just-finished run becomes the resume point for the next one.
    PAST=$EPOCH
    PAST_SUB=$i
  done

  # (The original also re-assigned PAST=$EPOCH here; redundant, since the
  # inner loop already did so on its last iteration.)
  EPOCH=$((EPOCH + 1))
  echo ""
  echo "STARTING NEW EPOCH: $EPOCH"
  echo ""
done
sotediffusion-wr3_3b.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:310249788a0dfe857d3db5efd9693bf8cf3896e1c77d98feaa9caa8f3ddbf338
3
+ size 7178351136
sotediffusion-wr3_text_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2135638aaa3bf73df0b1c78fc6e35bb745abc4f21526f88fa1a75dc48ec99eee
3
+ size 1389382144