Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -5,13 +5,12 @@ from os import path
 from safetensors.torch import load_file
 import huggingface_hub
 from huggingface_hub import hf_hub_download
-
-
+import os
 cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
 os.environ["TRANSFORMERS_CACHE"] = cache_path
 os.environ["HF_HUB_CACHE"] = cache_path
 os.environ["HF_HOME"] = cache_path
-
+import spaces
 import gradio as gr
 import torch
 from diffusers import FluxPipeline

@@ -92,7 +91,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     with gr.Column(scale=4):
         output = gr.Image(label="Your Generated Image")

-
+    @spaces.GPU
     def process_image(height, width, steps, scales, prompt, seed, preset_prompt):
         global pipe
         with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
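This small commit is the complete recipe for moving the Space onto ZeroGPU ("Running on Zero"): add import spaces, and decorate the GPU-bound handler with @spaces.GPU so a GPU is attached only for the duration of each call. It also fixes a latent bug: app.py set os.environ while importing only "from os import path", which would raise a NameError, so import os is added. Below is a minimal, self-contained sketch of the same pattern; the model id, handler, and UI are illustrative placeholders, not this Space's actual code.

import spaces  # import before torch so ZeroGPU can patch CUDA initialization
import gradio as gr
import torch
from diffusers import FluxPipeline

# Load once at startup on CPU; ZeroGPU only grants a GPU inside @spaces.GPU calls.
pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell",  # placeholder model id
    torch_dtype=torch.bfloat16,
)

@spaces.GPU  # a GPU is allocated for the duration of each call to this function
def generate(prompt: str):
    pipe.to("cuda")  # move the pipeline onto the just-attached GPU
    with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
        # schnell is distilled for few-step, guidance-free sampling
        return pipe(prompt, num_inference_steps=4, guidance_scale=0.0).images[0]

demo = gr.Interface(fn=generate, inputs="text", outputs="image")
demo.launch()

Two details worth noting: the commit places import spaces before import gradio and import torch, in line with ZeroGPU's guidance to import it ahead of anything that initializes CUDA, and a handler that may outrun the default time slice can request more with @spaces.GPU(duration=120).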