donlucci77 committed
Commit 78b3ee1 · verified · 1 Parent(s): d99f990

Update sek.toml

Files changed (1)
  1. sek.toml +112 -112

sek.toml CHANGED
@@ -1,112 +1,112 @@
- [[subsets]]
- caption_extension = ".txt"
- image_dir = "C:/Users/nousa/Desktop/sek"
- keep_tokens = 1
- name = "Main"
- num_repeats = 2
- random_crop = true
- shuffle_caption = true
- shuffle_caption_sigma = 10
-
- [train_mode]
- train_mode = "lora"
-
- [general_args.args]
- persistent_data_loader_workers = true
- mixed_precision = "bf16"
- gradient_checkpointing = true
- gradient_accumulation_steps = 6
- seed = 1337
- max_data_loader_n_workers = 1
- max_token_length = 225
- prior_loss_weight = 1.0
- xformers = true
- max_train_epochs = 19
- sdxl = true
- v_parameterization = true
- pretrained_model_name_or_path = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Stable-diffusion/NoobAI-XL-Vpred-v0.9r.safetensors"
-
- [general_args.dataset_args]
- resolution = 1024
- batch_size = 4
-
- [network_args.args]
- network_dim = 16
- network_alpha = 8.0
- min_timestep = 0
- max_timestep = 1000
- network_train_unet_only = true
-
- [optimizer_args.args]
- optimizer_type = "ADOPTAOScheduleFree"
- lr_scheduler = "constant_with_warmup"
- loss_type = "l2"
- warmup_ratio = 0.1
- scale_weight_norms = 99.0
- max_grad_norm = 0.25
- zero_terminal_snr = true
- learning_rate = 0.005
-
- [saving_args.args]
- save_precision = "bf16"
- save_model_as = "safetensors"
- save_last_n_epochs = 4
- save_every_n_epochs = 1
- save_toml = true
- output_dir = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Lora/sek"
- output_name = "sek"
- save_toml_location = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Lora/sek"
-
- [logging_args.args]
- logging_dir = ""
- log_tracker_name = "sdxl"
- log_with = "wandb"
- wandb_api_key = "5dd7ee6f1f34419f7fdce82419e25f5673c73ab2"
-
- [extra_args.args]
- validation_split = "0.2"
- validation_seed = "1337"
- validation_every_n_step = "10"
- max_validation_steps = "40"
- edm2_loss_weighting = "True"
- edm2_loss_weighting_optimizer = "LoraEasyCustomOptimizer.fmarscrop.FMARSCrop"
- edm2_loss_weighting_optimizer_lr = "2e-2"
- edm2_loss_weighting_optimizer_args = "{'eps_floor':1e-30, 'eps':1e-6, 'gamma':0.0005}"
- edm2_loss_weighting_lr_scheduler = "True"
- edm2_loss_weighting_lr_scheduler_warmup_percent = "0.1"
- edm2_loss_weighting_lr_scheduler_constant_percent = "0.9"
- edm2_loss_weighting_max_grad_norm = "0"
- edm2_loss_weighting_generate_graph_every_x_steps = "20"
- edm2_loss_weighting_generate_graph = "True"
- edm2_loss_weighting_num_channels = "448"
- edm2_loss_weighting_generate_graph_y_limit = "50"
- edm2_loss_weighting_lr_scheduler_decay_scaling = "1.5"
- sangoi_loss_modifier = "True"
- sangoi_loss_modifier_min_snr = "0"
- pin_data_loader_memory = "True"
- disable_cuda_reduced_precision_operations = "True"
- train_network_norm_modules_as_float32 = "True"
-
- [bucket_args.dataset_args]
- enable_bucket = true
- min_bucket_reso = 256
- max_bucket_reso = 2048
- bucket_reso_steps = 64
-
- [network_args.args.network_args]
- conv_dim = 16
- conv_alpha = 8.0
- algo = "locon"
- dora_wd = true
- train_norm = true
- use_scalar = "True"
-
- [optimizer_args.args.optimizer_args]
- betas = "0.9,0.9999"
- weight_decay = "1e-5"
- adaptive_clip = "0.5"
- eps = "1e-6"
- mars_gamma = "0.05"
- stable_weight_decay = "False"
- weight_decouple = "True"
- fisher = "False"
 
+ [[subsets]]
+ caption_extension = ".txt"
+ image_dir = "C:/Users/nousa/Desktop/sek"
+ keep_tokens = 1
+ name = "Main"
+ num_repeats = 2
+ random_crop = true
+ shuffle_caption = true
+ shuffle_caption_sigma = 10
+
+ [train_mode]
+ train_mode = "lora"
+
+ [general_args.args]
+ persistent_data_loader_workers = true
+ mixed_precision = "bf16"
+ gradient_checkpointing = true
+ gradient_accumulation_steps = 6
+ seed = 1337
+ max_data_loader_n_workers = 1
+ max_token_length = 225
+ prior_loss_weight = 1.0
+ xformers = true
+ max_train_epochs = 19
+ sdxl = true
+ v_parameterization = true
+ pretrained_model_name_or_path = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Stable-diffusion/NoobAI-XL-Vpred-v0.9r.safetensors"
+
+ [general_args.dataset_args]
+ resolution = 1024
+ batch_size = 4
+
+ [network_args.args]
+ network_dim = 16
+ network_alpha = 8.0
+ min_timestep = 0
+ max_timestep = 1000
+ network_train_unet_only = true
+
+ [optimizer_args.args]
+ optimizer_type = "ADOPTAOScheduleFree"
+ lr_scheduler = "constant_with_warmup"
+ loss_type = "l2"
+ warmup_ratio = 0.1
+ scale_weight_norms = 99.0
+ max_grad_norm = 0.25
+ zero_terminal_snr = true
+ learning_rate = 0.005
+
+ [saving_args.args]
+ save_precision = "bf16"
+ save_model_as = "safetensors"
+ save_last_n_epochs = 4
+ save_every_n_epochs = 1
+ save_toml = true
+ output_dir = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Lora/sek"
+ output_name = "sek"
+ save_toml_location = "C:/Users/nousa/Music/stable-diffusion-webui-reForge/models/Lora/sek"
+
+ [logging_args.args]
+ logging_dir = ""
+ log_tracker_name = "sdxl"
+ log_with = "wandb"
+
+
+ [extra_args.args]
+ validation_split = "0.2"
+ validation_seed = "1337"
+ validation_every_n_step = "10"
+ max_validation_steps = "40"
+ edm2_loss_weighting = "True"
+ edm2_loss_weighting_optimizer = "LoraEasyCustomOptimizer.fmarscrop.FMARSCrop"
+ edm2_loss_weighting_optimizer_lr = "2e-2"
+ edm2_loss_weighting_optimizer_args = "{'eps_floor':1e-30, 'eps':1e-6, 'gamma':0.0005}"
+ edm2_loss_weighting_lr_scheduler = "True"
+ edm2_loss_weighting_lr_scheduler_warmup_percent = "0.1"
+ edm2_loss_weighting_lr_scheduler_constant_percent = "0.9"
+ edm2_loss_weighting_max_grad_norm = "0"
+ edm2_loss_weighting_generate_graph_every_x_steps = "20"
+ edm2_loss_weighting_generate_graph = "True"
+ edm2_loss_weighting_num_channels = "448"
+ edm2_loss_weighting_generate_graph_y_limit = "50"
+ edm2_loss_weighting_lr_scheduler_decay_scaling = "1.5"
+ sangoi_loss_modifier = "True"
+ sangoi_loss_modifier_min_snr = "0"
+ pin_data_loader_memory = "True"
+ disable_cuda_reduced_precision_operations = "True"
+ train_network_norm_modules_as_float32 = "True"
+
+ [bucket_args.dataset_args]
+ enable_bucket = true
+ min_bucket_reso = 256
+ max_bucket_reso = 2048
+ bucket_reso_steps = 64
+
+ [network_args.args.network_args]
+ conv_dim = 16
+ conv_alpha = 8.0
+ algo = "locon"
+ dora_wd = true
+ train_norm = true
+ use_scalar = "True"
+
+ [optimizer_args.args.optimizer_args]
+ betas = "0.9,0.9999"
+ weight_decay = "1e-5"
+ adaptive_clip = "0.5"
+ eps = "1e-6"
+ mars_gamma = "0.05"
+ stable_weight_decay = "False"
+ weight_decouple = "True"
+ fisher = "False"
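
For reference, a minimal sketch (not part of this commit) of how the committed sek.toml could be loaded and inspected with Python's standard-library tomllib (Python 3.11+); the relative path "sek.toml" is assumed for illustration only:

import tomllib

# tomllib requires a binary file handle
with open("sek.toml", "rb") as f:
    cfg = tomllib.load(f)

# Dotted table headers such as [optimizer_args.args.optimizer_args] parse into nested dicts.
print(cfg["general_args"]["args"]["max_train_epochs"])      # 19
print(cfg["optimizer_args"]["args"]["optimizer_type"])      # ADOPTAOScheduleFree

# Values under [extra_args.args] are written as TOML strings (e.g. "0.2", "True"),
# so whatever consumes this config is presumably expected to cast them itself.
print(type(cfg["extra_args"]["args"]["validation_split"]))  # <class 'str'>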