ORKAFILM committed
Commit d9aceb8 · verified · 1 Parent(s): 756a835

Upload folder using huggingface_hub
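For reference, a commit like this is typically produced with the huggingface_hub client's upload_folder call. A minimal sketch, where the repo id and local folder path are placeholders rather than values taken from this commit:

# Minimal sketch of the kind of upload that produces a commit like this one.
# repo_id and folder_path are placeholders, not values from this commit.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="output/my_lora",        # local training output folder (placeholder)
    repo_id="ORKAFILM/my-lora",          # target model repo (placeholder)
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)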

Files changed (2)
  1. config.yaml +4 -5
  2. lora.safetensors +1 -1
config.yaml CHANGED

@@ -12,7 +12,7 @@ config:
     linear_alpha: 16
   save:
     dtype: float16
-    save_every: 1001
+    save_every: 2001
     max_step_saves_to_keep: 1
   datasets:
   - folder_path: input_images
@@ -22,12 +22,11 @@ config:
     cache_latents_to_disk: false
     cache_latents: true
     resolution:
-    - 512
     - 768
     - 1024
   train:
     batch_size: 1
-    steps: 1000
+    steps: 2000
     gradient_accumulation_steps: 1
     train_unet: true
     train_text_encoder: false
@@ -35,7 +34,7 @@ config:
     gradient_checkpointing: true
     noise_scheduler: flowmatch
     optimizer: adamw8bit
-    lr: 0.0004
+    lr: 0.0002
     ema_config:
       use_ema: true
       ema_decay: 0.99
@@ -46,7 +45,7 @@ config:
     quantize: true
   sample:
     sampler: flowmatch
-    sample_every: 1001
+    sample_every: 2001
     width: 1024
     height: 1024
     prompts: []
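The flowmatch scheduler/sampler, adamw8bit optimizer, and quantize flag are characteristic of an ai-toolkit LoRA run on a flow-matching base model. Assuming the base is a diffusers-compatible model such as FLUX.1-dev (an assumption; the config above does not name it), the resulting lora.safetensors can be loaded roughly like this:

# Minimal sketch of loading the trained LoRA for inference.
# The base model id is an assumption; this config does not name it.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",  # assumed flow-matching base model
    torch_dtype=torch.bfloat16,
).to("cuda")
pipe.load_lora_weights(".", weight_name="lora.safetensors")  # file from this repo

# 1024x1024 matches the sample settings in the config above.
image = pipe("a test prompt", width=1024, height=1024).images[0]
image.save("sample.png")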
lora.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4878b4d1bddc66de5a9d93d564a447089cdca2d94a1da697e8ab0980a4a7cec8
+oid sha256:129debb7275f6e22d4d03802d6c168afb1e84082045de4a497c569829c8b9c09
 size 171969416
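Only the Git LFS pointer changed here: the size is identical and only the sha256 oid differs, which is what a retrain of the same architecture produces. Since the oid is the SHA-256 of the actual file contents, a downloaded copy can be checked against the pointer. A minimal sketch, with a placeholder repo id:

# Verify a downloaded lora.safetensors against the LFS pointer's sha256 oid.
# repo_id is a placeholder; the expected digest is the new oid above.
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="ORKAFILM/my-lora", filename="lora.safetensors")
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == "129debb7275f6e22d4d03802d6c168afb1e84082045de4a497c569829c8b9c09"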