benjamin-paine committed
Commit a904d5b · verified · 1 Parent(s): e58bc29

Update app.py

Files changed (1): app.py (+2, -22)
app.py CHANGED
@@ -59,9 +59,7 @@ def infer(
     width=1024,
     height=1024,
     guidance_scale=4.0,
-    num_inference_steps=50,
-    cfg_normalization=True,
-    cfg_trunc_ratio=0.25,
+    num_inference_steps=30,
     progress=gr.Progress(track_tqdm=True),
 ):
     if randomize_seed:
@@ -76,8 +74,6 @@ def infer(
             num_inference_steps=num_inference_steps,
             width=width,
             height=height,
-            cfg_normalization=cfg_normalization,
-            cfg_trunc_ratio=cfg_trunc_ratio,
             generator=generator,
         ).images[0]
 
@@ -126,20 +122,6 @@ with gr.Blocks(css=css) as demo:
                 value=0,
             )
 
-            with gr.Row():
-                cfg_normalization = gr.Checkbox(
-                    label="CFG Normalization",
-                    value=True
-                )
-
-                cfg_trunc_ratio = gr.Slider(
-                    label="CFG Truncation Ratio",
-                    minimum=0.0,
-                    maximum=1.0,
-                    step=0.05,
-                    value=0.25,
-                )
-
             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
 
             with gr.Row():
@@ -173,7 +155,7 @@ with gr.Blocks(css=css) as demo:
                 minimum=1,
                 maximum=100,
                 step=1,
-                value=50,
+                value=30,
             )
 
     gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=True, cache_mode="lazy")
@@ -190,8 +172,6 @@ with gr.Blocks(css=css) as demo:
             height,
             guidance_scale,
             num_inference_steps,
-            cfg_normalization,
-            cfg_trunc_ratio,
         ],
         outputs=[result, seed],
     )
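For reference, a minimal sketch of the call path this commit leaves in place: the default step count drops from 50 to 30 and the cfg_normalization / cfg_trunc_ratio arguments are no longer passed to the pipeline. Everything not visible in the diff (the checkpoint id, the pipe variable, the MAX_SEED constant, and the prompt/seed/randomize_seed parameters) is an assumption following the usual Gradio text-to-image demo layout, not part of this commit.

# Minimal sketch, not the Space's full app.py. Assumed names: pipe, MAX_SEED,
# and the prompt/seed/randomize_seed parameters; the checkpoint id below is a
# placeholder, since the diff does not show how the pipeline is loaded.
import random

import gradio as gr
import torch
from diffusers import DiffusionPipeline

MAX_SEED = 2**32 - 1  # assumed constant
pipe = DiffusionPipeline.from_pretrained("<model-id>", torch_dtype=torch.bfloat16)  # placeholder id

def infer(
    prompt,
    seed=0,
    randomize_seed=True,
    width=1024,
    height=1024,
    guidance_scale=4.0,
    num_inference_steps=30,  # default lowered from 50 in this commit
    progress=gr.Progress(track_tqdm=True),
):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)
    # cfg_normalization and cfg_trunc_ratio are no longer forwarded to the pipeline call.
    image = pipe(
        prompt=prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]
    return image, seed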