NCJ committed
Commit af6bfff
1 Parent(s): b927b5d

add zerogpu dev

Files changed (2)
  1. demo/img_gen.py +4 -5
  2. demo/relighting_gen.py +4 -5
demo/img_gen.py CHANGED
@@ -1,4 +1,5 @@
 import gradio as gr
+import spaces
 import torch
 import torch.nn.functional as F
 from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler
@@ -6,17 +7,15 @@ from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler
 
 model_id = "stabilityai/stable-diffusion-2-1"
 
-device = torch.device('cpu')
-dtype = torch.float32
-if torch.cuda.is_available():
-    device = torch.device('cuda')
-    dtype = torch.float16
+device = torch.device('cuda')
+dtype = torch.float16
 
 pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=dtype)
 pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
 pipe = pipe.to(device)
 
 
+@spaces.GPU
 def img_gen(prompt, seed, steps, cfg, down_from_768=False, progress=gr.Progress(track_tqdm=True)):
     generator = torch.Generator(device=device).manual_seed(int(seed))
     hw = 512 if not down_from_768 else 768
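
Both files follow the same ZeroGPU migration pattern: spaces is imported at the top of the module, the device and dtype are set to cuda/float16 unconditionally, and the function that actually runs inference is wrapped in @spaces.GPU so a GPU is attached only while it executes. A minimal sketch of that pattern, assuming a Hugging Face Space on ZeroGPU hardware with the spaces package available (the model id, function name, and prompt argument are illustrative, not part of this commit):

# Sketch of the ZeroGPU pattern used in this commit; names below are illustrative.
import spaces  # import before any CUDA work so ZeroGPU can manage the device
import torch
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-1", torch_dtype=torch.float16
).to("cuda")  # on ZeroGPU, moving to 'cuda' at import time is fine; the GPU is attached lazily

@spaces.GPU  # a GPU is allocated only for the duration of this call
def generate(prompt):
    return pipe(prompt).images[0]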
demo/relighting_gen.py CHANGED
@@ -1,16 +1,14 @@
 import imageio
 import numpy as np
+import spaces
 import torch
 from diffusers import UniPCMultistepScheduler, StableDiffusionControlNetPipeline
 from diffusers.utils import get_class_from_dynamic_module
 
 from tqdm import tqdm
 
-device = torch.device('cpu')
-dtype = torch.float32
-if torch.cuda.is_available():
-    device = torch.device('cuda')
-    dtype = torch.float16
+device = torch.device('cuda')
+dtype = torch.float16
 
 NeuralTextureControlNetModel = get_class_from_dynamic_module(
     "dilightnet/model_helpers",
@@ -28,6 +26,7 @@ pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
 pipe.set_progress_bar_config(disable=True)
 
 
+@spaces.GPU
 def relighting_gen(masked_ref_img, mask, cond_path, frames, prompt, steps, seed, cfg):
     mask = mask[..., :1] / 255.
     for i in tqdm(range(frames)):
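
For longer calls such as the per-frame relighting loop, the decorator also accepts a duration hint in seconds. A hedged sketch; the 120-second value is an assumption and is not taken from this commit:

# Assumption, not in the commit: a duration hint can be passed when a call may
# exceed the default ZeroGPU allocation window (value is illustrative).
import spaces

@spaces.GPU(duration=120)
def relighting_gen(masked_ref_img, mask, cond_path, frames, prompt, steps, seed, cfg):
    ...  # body unchanged from the file above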