guardiancc
committed on
Update app.py
app.py CHANGED
@@ -2156,7 +2156,7 @@ def update_selection(evt: gr.SelectData, width, height):
         height,
     )
 
-@spaces.GPU
+@spaces.GPU
 def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
     pipe.to("cuda")
     generator = torch.Generator(device="cuda").manual_seed(seed)
@@ -2193,7 +2193,7 @@ def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps
     ).images[0]
     return final_image
 
-@spaces.GPU
+@spaces.GPU
 def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
     if selected_index is None:
         raise gr.Error("You must select a LoRA before proceeding.🧨")
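For context, both functions touched by this commit rely on the ZeroGPU decorator from Hugging Face's `spaces` package: a function decorated with `@spaces.GPU` is allocated a GPU only for the duration of the call, so the pipeline is moved to CUDA inside the function body rather than at startup. Below is a minimal sketch of that pattern, assuming a diffusers pipeline; the model id, the `joint_attention_kwargs` LoRA-scale hook, and the call arguments are illustrative assumptions, not code taken from this Space.

# Minimal sketch of the @spaces.GPU pattern used in app.py (setup and call
# arguments are assumptions for illustration, not this Space's actual code).
import spaces
import torch
import gradio as gr
from diffusers import DiffusionPipeline

# Loaded once at startup on CPU; ZeroGPU only attaches a GPU inside
# functions decorated with @spaces.GPU.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",  # assumed base model
    torch_dtype=torch.bfloat16,
)

@spaces.GPU  # request a GPU for the duration of this call
def generate_image(prompt_mash, steps, seed, cfg_scale, width, height,
                   lora_scale, progress=gr.Progress(track_tqdm=True)):
    pipe.to("cuda")
    generator = torch.Generator(device="cuda").manual_seed(seed)
    image = pipe(
        prompt=prompt_mash,
        num_inference_steps=steps,
        guidance_scale=cfg_scale,
        width=width,
        height=height,
        generator=generator,
        joint_attention_kwargs={"scale": lora_scale},  # assumed LoRA-scale hook
    ).images[0]
    return image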