Update app.py
app.py CHANGED
@@ -13,24 +13,11 @@ from huggingface_hub import hf_hub_download
 
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 MAX_SEED = np.iinfo(np.int32).max
-model = "black-forest-labs/FLUX.1-dev"
-hf_hub_download(repo_id="black-forest-labs/FLUX.1-Fill-dev", filename="ae.safetensors", local_dir=".")
 
-
-transformer = FluxTransformer2DModel.from_single_file(
-    "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/blob/main/flux1-fill-dev.safetensors",
-    low_cpu_mem_usage=False,
-    ignore_mismatched_sizes=True,
-    torch_dtype=torch.bfloat16
-)
-vae = AutoencoderKL.from_pretrained("./ae.safetensors")
-pipe = FluxFillPipeline.from_pretrained(
-    model,
-    vae=vae,
-    transformer=transformer,
-    torch_dtype=torch.bfloat16)
-pipe.to("cuda")
+repo_id = "black-forest-labs/FLUX.1-Fill-dev"
 
+if torch.cuda.is_available():
+    pipe = FluxFillPipeline.from_pretrained(repo_id, torch_dtype=torch.bfloat16).to("cuda")
 
 @spaces.GPU()
 def inpaintGen(
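For context, a minimal sketch of how a pipeline loaded this way can be called for inpainting with diffusers' FluxFillPipeline. The prompt, input file names, and parameter values below are illustrative placeholders, not the Space's actual inpaintGen implementation:

import torch
from diffusers import FluxFillPipeline
from diffusers.utils import load_image

# Load FLUX.1-Fill-dev directly as a single pretrained pipeline, as in the diff above.
pipe = FluxFillPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-Fill-dev", torch_dtype=torch.bfloat16
).to("cuda")

# Illustrative inputs: an RGB image plus a white-on-black mask marking the region to repaint.
image = load_image("input.png")
mask = load_image("mask.png")

result = pipe(
    prompt="a small wooden table",  # placeholder prompt
    image=image,
    mask_image=mask,
    guidance_scale=30.0,
    num_inference_steps=28,
    generator=torch.Generator("cpu").manual_seed(0),
).images[0]
result.save("output.png")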