Update app.py
app.py CHANGED

@@ -51,7 +51,7 @@ MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2048"))
 USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
 
-MODEL = os.getenv("MODEL", "https://huggingface.co/
+MODEL = os.getenv("MODEL", "https://huggingface.co/cagliostrolab/animagine-xl-3.0/blob/main/animagine-xl-3.0.safetensors")
 
 torch.backends.cudnn.deterministic = True
 torch.backends.cudnn.benchmark = False
@@ -846,4 +846,4 @@ with gr.Blocks(css="style.css", theme="NoCrypt/[email protected]") as demo:
     outputs=[result, gr_metadata],
     api_name=False,
 )
-demo.queue(max_size=
+demo.queue(max_size=30).launch(debug=IS_COLAB, share=IS_COLAB)
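For context on what the new default implies, below is a minimal, hedged sketch of how a Space like this could consume MODEL together with the USE_TORCH_COMPILE and ENABLE_CPU_OFFLOAD flags from the first hunk. The actual loading code in app.py is outside this diff; the load_pipeline helper and the use of diffusers' StableDiffusionXLPipeline.from_single_file are assumptions, chosen because the new default points at a single-file SDXL .safetensors checkpoint.

# Sketch only: app.py's real loading code is not shown in this diff.
# `load_pipeline` is a hypothetical helper; from_single_file and the
# flag names match common diffusers/Spaces usage.
import os

import torch
from diffusers import StableDiffusionXLPipeline

MODEL = os.getenv(
    "MODEL",
    "https://huggingface.co/cagliostrolab/animagine-xl-3.0/blob/main/animagine-xl-3.0.safetensors",
)
USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"


def load_pipeline(model: str) -> StableDiffusionXLPipeline:
    # from_single_file accepts a local path or a Hub URL to a .safetensors checkpoint.
    pipe = StableDiffusionXLPipeline.from_single_file(model, torch_dtype=torch.float16)
    if ENABLE_CPU_OFFLOAD:
        # Keeps submodules on CPU and moves them to GPU only while they run,
        # trading throughput for lower VRAM usage.
        pipe.enable_model_cpu_offload()
    else:
        pipe.to("cuda")
    if USE_TORCH_COMPILE:
        # Compile the UNet; the first generation pays a warm-up cost.
        pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
    return pipe

As for the second hunk, demo.queue(max_size=30) bounds how many requests may wait in Gradio's event queue at once (further requests are turned away until a slot frees up), and launch(debug=IS_COLAB, share=IS_COLAB) enables blocking debug output and the temporary public share link only when the app detects it is running in Colab.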