Update autotrain.ipynb
autotrain.ipynb  CHANGED  (+10 -9)
@@ -543,15 +543,16 @@
 " \"text_encoder_lr\": float(text_encoder_lr),\n",
 " \"network_module\": network_module,\n",
 " \"network_dim\": 64,\n",
-" \"network_alpha\":
-" \"training_comment\": \"
+" \"network_alpha\": 48,\n",
+" \"training_comment\": \"GSGI Trainer\",\n",
 " },\n",
 " \"optimizer_arguments\": {\n",
-" \"optimizer_type\": \"
-" \"optimizer_args\":
+" \"optimizer_type\": \"AdamW\",\n",
+" \"optimizer_args\": [\"weight_decay=0.2\"] if not optimizer_args else optimizer_args,\n",
 " \"learning_rate\": unet_lr,\n",
 " \"max_grad_norm\": 1.0,\n",
-" \"lr_scheduler\": \"
+" \"lr_scheduler\": \"cosine_with_restarts\",\n",
+" \"lr_scheduler_num_cycles\": 4,\n",
 " },\n",
 " \"dataset_arguments\": {\n",
 " \"cache_latents\": True,\n",
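Read together, the added lines in this hunk pin the LoRA network strength and the optimizer setup. Below is a minimal sketch of the resulting fragment written as a plain Python dict; the outer section name for the network keys and all placeholder values are assumptions, since the hunk starts inside an existing section and the real values (text_encoder_lr, unet_lr, network_module, optimizer_args) come from earlier notebook cells.

# Minimal sketch of the config fragment after this commit; not the notebook cell itself.
# The real values come from earlier notebook cells; placeholders are used here.
text_encoder_lr = 5e-5              # placeholder
unet_lr = 1e-4                      # placeholder
network_module = "networks.lora"    # placeholder
optimizer_args = None               # empty -> falls back to ["weight_decay=0.2"] below

config_fragment = {
    "network_arguments": {          # section name assumed; the hunk starts mid-section
        "text_encoder_lr": float(text_encoder_lr),
        "network_module": network_module,
        "network_dim": 64,
        "network_alpha": 48,                     # added in this commit
        "training_comment": "GSGI Trainer",      # added in this commit
    },
    "optimizer_arguments": {
        "optimizer_type": "AdamW",               # added in this commit
        "optimizer_args": ["weight_decay=0.2"] if not optimizer_args else optimizer_args,
        "learning_rate": unet_lr,
        "max_grad_norm": 1.0,
        "lr_scheduler": "cosine_with_restarts",  # added in this commit
        "lr_scheduler_num_cycles": 4,            # added in this commit
    },
}

The fallback expression keeps any user-supplied optimizer_args and only injects the mild weight decay when none are given.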
@@ -573,16 +574,16 @@
 " \"gradient_checkpointing\": False,\n",
 " \"gradient_accumulation_steps\": 1,\n",
 " \"mixed_precision\": \"fp16\",\n",
-" \"clip_skip\":
+" \"clip_skip\": 1,\n",
 " \"logging_dir\": \"/content/Dreamboot-Config/logs\",\n",
 " \"log_prefix\": Loraname,\n",
 " \"lowram\": True,\n",
-" \"training_comment\" : \"train by
+" \"training_comment\" : \"train by GSGI Trainer\",\n",
 " },\n",
 " \"sample_prompt_arguments\": {\n",
 " \"sample_every_n_steps\": 200,\n",
 " \"sample_every_n_epochs\": 1,\n",
-" \"sample_sampler\": \"euler\",\n",
+" \"sample_sampler\": \"euler a\",\n",
 " },\n",
 " \"dreambooth_arguments\": {\n",
 " \"prior_loss_weight\": 1,\n",
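The second hunk sets clip_skip, tags the run with a training comment, and switches the sample sampler from "euler" to "euler a". A sketch of the affected sections follows, again as a plain dict, with a placeholder Loraname and an assumed section name for the training keys.

# Sketch of the training and sampling sections after this commit.
Loraname = "my_lora"   # placeholder for the notebook variable

training_fragment = {
    "training_arguments": {   # section name assumed from the keys visible in the hunk
        "gradient_checkpointing": False,
        "gradient_accumulation_steps": 1,
        "mixed_precision": "fp16",
        "clip_skip": 1,                                    # added in this commit
        "logging_dir": "/content/Dreamboot-Config/logs",
        "log_prefix": Loraname,
        "lowram": True,
        "training_comment": "train by GSGI Trainer",       # added in this commit
    },
    "sample_prompt_arguments": {
        "sample_every_n_steps": 200,
        "sample_every_n_epochs": 1,
        "sample_sampler": "euler a",                       # changed from "euler"
    },
    "dreambooth_arguments": {
        "prior_loss_weight": 1,
    },
}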
@@ -598,7 +599,7 @@
 " --w 512 \\\n",
 " --h 768 \\\n",
 " --l 7 \\\n",
-" --s
+" --s 30\n",
 "\"\"\"\n",
 "config_path = os.path.join(config_dir, \"config_file.toml\")\n",
 "prompt_path = os.path.join(config_dir, \"sample_prompt.txt\")\n",
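The last hunk completes the sample prompt template with a step count (--s 30) and keeps the config and prompt paths under config_dir. Below is a sketch of how those pieces fit together; the prompt text, the value of config_dir, and the use of the toml package for serialization are assumptions, as only the option flags and the two os.path.join calls appear in the diff.

import os

config_dir = "/content/Dreamboot-Config"   # assumed; only the file names appear in the hunk

# The notebook builds the prompt as a triple-quoted string; the trailing backslashes
# join everything into a single prompt line for the sampler. "--s 30" (sampling steps)
# is the value added in this commit; the leading prompt text is a placeholder.
sample_prompt = """a photo of the trained subject \
  --w 512 \
  --h 768 \
  --l 7 \
  --s 30
"""

config_path = os.path.join(config_dir, "config_file.toml")
prompt_path = os.path.join(config_dir, "sample_prompt.txt")

# Writing the files happens elsewhere in the notebook; shown commented out so the
# sketch stays side-effect free. Serializing with the toml package is an assumption.
# import toml
# with open(config_path, "w") as f:
#     toml.dump({**config_fragment, **training_fragment}, f)
# with open(prompt_path, "w") as f:
#     f.write(sample_prompt)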