upload
Chabasirav2_config/config_file.toml
ADDED
@@ -0,0 +1,64 @@
+[model_arguments]
+v2 = false
+v_parameterization = false
+pretrained_model_name_or_path = "/content/pretrained_model/Animefull-final-pruned.ckpt"
+
+[additional_network_arguments]
+no_metadata = false
+unet_lr = 0.0001
+text_encoder_lr = 5e-5
+network_module = "lycoris.kohya"
+network_dim = 32
+network_alpha = 16
+network_args = [ "conv_dim=32", "conv_alpha=16", "algo=lora",]
+network_train_unet_only = false
+network_train_text_encoder_only = false
+
+[optimizer_arguments]
+optimizer_type = "AdamW8bit"
+learning_rate = 0.0001
+max_grad_norm = 1.0
+lr_scheduler = "constant"
+lr_warmup_steps = 0
+
+[dataset_arguments]
+debug_dataset = false
+in_json = "/content/LoRA/meta_lat.json"
+train_data_dir = "/content/LoRA/train_data"
+dataset_repeats = 8
+shuffle_caption = true
+keep_tokens = 0
+resolution = "512,512"
+caption_dropout_rate = 0
+caption_tag_dropout_rate = 0
+caption_dropout_every_n_epochs = 0
+color_aug = false
+token_warmup_min = 1
+token_warmup_step = 0
+
+[training_arguments]
+output_dir = "/content/LoRA/output"
+output_name = "Chabasirav2"
+save_precision = "fp16"
+save_every_n_epochs = 1
+train_batch_size = 2
+max_token_length = 225
+mem_eff_attn = false
+xformers = true
+max_train_epochs = 10
+max_data_loader_n_workers = 8
+persistent_data_loader_workers = true
+gradient_checkpointing = false
+gradient_accumulation_steps = 1
+mixed_precision = "fp16"
+clip_skip = 2
+logging_dir = "/content/LoRA/logs"
+log_prefix = "Chabasirav2"
+lowram = true
+
+[sample_prompt_arguments]
+sample_every_n_epochs = 999999
+sample_sampler = "ddim"
+
+[saving_arguments]
+save_model_as = "safetensors"
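This TOML follows the config-file layout read by kohya-ss sd-scripts (train_network.py can take it via --config_file, usually together with --sample_prompts pointing at the prompt file below). As a minimal sketch, the file can be inspected with Python's standard tomllib (Python 3.11+); the relative path is assumed to match this repo's layout:

# Minimal sketch (assumes Python 3.11+ for the stdlib tomllib and that the
# config sits at the path used in this repo): load the TOML and echo a few
# of the settings shown in the diff above.
import tomllib

with open("Chabasirav2_config/config_file.toml", "rb") as f:
    config = tomllib.load(f)

net = config["additional_network_arguments"]
train = config["training_arguments"]

print(net["network_module"], net["network_dim"], net["network_alpha"])  # lycoris.kohya 32 16
print(train["max_train_epochs"], train["train_batch_size"])             # 10 2
print(config["optimizer_arguments"]["optimizer_type"])                  # AdamW8bit

In short, the config trains a LyCORIS network with algo=lora (dim 32 / alpha 16, conv_dim 32 / conv_alpha 16) on the Animefull-final-pruned base for 10 epochs at batch size 2 and 512x512 resolution, using AdamW8bit with a constant learning rate of 1e-4 (text encoder 5e-5), and saves fp16 safetensors every epoch.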
Chabasirav2_config/sample_prompt.txt
ADDED
@@ -0,0 +1,2 @@
+
+masterpiece, best quality, 1girl, aqua eyes, baseball cap, blonde hair, closed mouth, earrings, green background, hat, hoop earrings, jewelry, looking at viewer, shirt, short hair, simple background, solo, upper body, yellow shirt --n lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry --w 512 --h 768 --l 7 --s 28
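The trailing flags on the prompt line are sd-scripts' inline sampling options, conventionally read as: --n negative prompt, --w width, --h height, --l CFG scale, --s sampling steps (here 512x768, scale 7, 28 steps with the ddim sampler set in the config). As an illustrative sketch only, such a line can be split with Python; the shortened prompt below is example input, not the file's content:

# Illustrative only: split a kohya-style sample prompt line into the positive
# prompt and its option flags. Flag meanings assumed from sd-scripts usage:
# --n negative prompt, --w width, --h height, --l CFG scale, --s steps.
import re

line = ("masterpiece, best quality, 1girl, aqua eyes, baseball cap "
        "--n lowres, bad anatomy, worst quality "
        "--w 512 --h 768 --l 7 --s 28")

parts = re.split(r" --([a-z]) ", line)      # the capture group keeps the flag letters
prompt = parts[0]
flags = dict(zip(parts[1::2], parts[2::2]))

print(prompt)  # positive prompt
print(flags)   # {'n': 'lowres, bad anatomy, worst quality', 'w': '512', 'h': '768', 'l': '7', 's': '28'}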