JoPmt committed
Commit: 74defa2
Parent: 414e178

Update app.py

Files changed (1): app.py (+11, -6)
app.py CHANGED
@@ -6,21 +6,26 @@ from transformers import pipeline
 from diffusers.utils import load_image
 from diffusers import DiffusionPipeline, DDPMScheduler
 
+from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline
+from diffusers.pipelines.wuerstchen import DEFAULT_STAGE_C_TIMESTEPS, DDPMWuerstchenScheduler
+
+
+
 accelerator = Accelerator(cpu=True)
-warp_prior = accelerator.prepare(DiffusionPipeline.from_pretrained("warp-ai/wuerstchen-prior", torch_dtype=torch.bfloat16, use_safetensors=True, safety_cheker=None))
-warp_prior.scheduler = DDPMScheduler.from_config(warp_prior.scheduler.config)
+warp_prior = accelerator.prepare(WuerstchenPriorPipeline.from_pretrained("warp-ai/wuerstchen-prior", torch_dtype=torch.bfloat16, use_safetensors=True, safety_cheker=None))
+warp_prior.scheduler = DDPMWuerstchenScheduler.from_config(warp_prior.scheduler.config)
 warp_prior = warp_prior.to("cpu")
-warp = accelerator.prepare(DiffusionPipeline.from_pretrained("warp-ai/wuerstchen", torch_dtype=torch.bfloat16, use_safetensors=True, safety_checker=None))
-warp.scheduler = DDPMScheduler.from_config(warp.scheduler.config)
+warp = accelerator.prepare(WuerstchenDecoderPipeline.from_pretrained("warp-ai/wuerstchen", torch_dtype=torch.bfloat16, use_safetensors=True, safety_checker=None))
+warp.scheduler = DDPMWuerstchenScheduler.from_config(warp.scheduler.config)
 warp = warp.to("cpu")
 generator = torch.Generator(device="cpu").manual_seed(random.randint(1, 4876364))
 
 def plex(cook, one, two):
     ###goof = load_image(img).resize((512, 512))
     negative_prompt = "lowres,text,bad quality,low quality,jpeg artifacts,ugly,bad hands,bad face,blurry,bad eyes,watermark,signature"
-    warp_out = warp_prior(prompt=cook, height=512,width=512,negative_prompt=negative_prompt,guidance_scale=4.0, num_inference_steps=5,generator=generator,output_type="pt")
+    warp_out = warp_prior(prompt=cook, height=512,width=512,negative_prompt=negative_prompt,guidance_scale=4.0, num_inference_steps=5,generator=generator,)
     primpt = ""
-    imas = warp(**warp_out, height=512, width=512, num_inference_steps=5, prompt=cook,negative_prompt=primpt,guidance_scale=0.0,output_type="pil",generator=generator).images[0]
+    imas = warp(warp_out.image_embbedings, height=512, width=512, num_inference_steps=5, prompt=cook,negative_prompt=primpt,guidance_scale=0.0,output_type="pil",generator=generator).images[0]
     return imas
 
 iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="prompt"), gr.Slider(label="Inference steps",minimum=1,step=1,maximum=10,value=5), gr.Slider(label="Prior guidance scale",minimum=4.1,step=0.1,maximum=19.9,value=4.1)], outputs=gr.Image(), title="Txt2Img Wrstchn SD", description="Txt2Img Wrstchn SD")
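
For orientation, here is a minimal sketch of the two-stage Wuerstchen flow this commit moves toward, based on the documented WuerstchenPriorPipeline / WuerstchenDecoderPipeline API in diffusers. It is not the committed app: it corrects two apparent typos in the diff (the prior output attribute is `image_embeddings`, not `image_embbedings`; `safety_cheker` is misspelled and, since the Wuerstchen pipelines take no safety-checker argument, simply dropped), omits `height`/`width` on the decoder call (the output size follows the prior's embeddings), leaves out the unused `DEFAULT_STAGE_C_TIMESTEPS` import, and, as an assumption about intent, wires the Gradio slider values `one` (inference steps) and `two` (prior guidance scale) into the calls, which the committed `plex` hardcodes as 5 and 4.0.

import random
import torch
import gradio as gr
from accelerate import Accelerator
from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline
from diffusers.pipelines.wuerstchen import DDPMWuerstchenScheduler

accelerator = Accelerator(cpu=True)

# Stage C prior: turns the text prompt into image embeddings.
warp_prior = accelerator.prepare(WuerstchenPriorPipeline.from_pretrained(
    "warp-ai/wuerstchen-prior", torch_dtype=torch.bfloat16, use_safetensors=True)).to("cpu")
warp_prior.scheduler = DDPMWuerstchenScheduler.from_config(warp_prior.scheduler.config)

# Stage B decoder: turns the image embeddings into a PIL image.
warp = accelerator.prepare(WuerstchenDecoderPipeline.from_pretrained(
    "warp-ai/wuerstchen", torch_dtype=torch.bfloat16, use_safetensors=True)).to("cpu")
warp.scheduler = DDPMWuerstchenScheduler.from_config(warp.scheduler.config)

generator = torch.Generator(device="cpu").manual_seed(random.randint(1, 4876364))

def plex(cook, one, two):
    # `one` = inference-steps slider, `two` = prior-guidance slider (assumed intent).
    negative_prompt = "lowres,text,bad quality,low quality,jpeg artifacts,ugly,bad hands,bad face,blurry,bad eyes,watermark,signature"
    warp_out = warp_prior(prompt=cook, height=512, width=512, negative_prompt=negative_prompt,
                          guidance_scale=two, num_inference_steps=int(one), generator=generator)
    # The prior exposes its result on the `image_embeddings` attribute.
    imas = warp(image_embeddings=warp_out.image_embeddings, prompt=cook, negative_prompt="",
                guidance_scale=0.0, num_inference_steps=int(one), output_type="pil",
                generator=generator).images[0]
    return imas

iface = gr.Interface(fn=plex,
                     inputs=[gr.Textbox(label="prompt"),
                             gr.Slider(label="Inference steps", minimum=1, step=1, maximum=10, value=5),
                             gr.Slider(label="Prior guidance scale", minimum=4.1, step=0.1, maximum=19.9, value=4.1)],
                     outputs=gr.Image(), title="Txt2Img Wrstchn SD", description="Txt2Img Wrstchn SD")
iface.launch()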