Jccqqqqq committed on
Commit
8726b78
·
1 Parent(s): 98a9bfc

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -0
app.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from diffusers import StableDiffusionControlNetPipeline, ControlNetModel
2
+ from diffusers import DiffusionPipeline
3
+ import torch
4
+ from controlnet_aux import OpenposeDetector
5
+ from diffusers import UniPCMultistepScheduler
6
+ from PIL import Image
7
+ import requests
8
+ from io import BytesIO
9
+ import gradio as gr
10
+
11
+ model = OpenposeDetector.from_pretrained("lllyasviel/ControlNet")
12
+ controlnet = ControlNetModel.from_pretrained("fusing/stable-diffusion-v1-5-controlnet-openpose", torch_dtype=torch.float16)
13
+ model_id = "Jccqqqqq/Personajes"
14
+ pipe = StableDiffusionControlNetPipeline.from_pretrained(
15
+ model_id,
16
+ controlnet=controlnet,
17
+ torch_dtype=torch.float16,
18
+ )
19
+ pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
20
+ pipe.enable_model_cpu_offload()
21
+ generators = []
22
def generate(prompt, inputimage, seed, steps, negative):
    """Generate an image whose pose matches a reference photo.

    Parameters
    ----------
    prompt : str
        Positive text prompt for the diffusion pipeline.
    inputimage : str
        URL of the reference photo; its pose conditions the generation.
    seed : int
        RNG seed, so the same inputs reproduce the same image.
    steps : int
        Number of denoising steps.
    negative : str
        Negative prompt.

    Returns
    -------
    PIL.Image.Image
        The first generated image.
    """
    # Seed a CPU generator so results are reproducible regardless of device.
    generator = torch.Generator(device="cpu").manual_seed(int(seed))

    # Fetch the reference photo. Fail fast on network/HTTP errors instead of
    # handing an error page to PIL (original had no timeout or status check).
    response = requests.get(inputimage, timeout=30)
    response.raise_for_status()
    img = Image.open(BytesIO(response.content))

    # Convert the photo into an OpenPose skeleton conditioning image.
    img = model(img)

    # Debug copy of the conditioning image. The original wrote unconditionally
    # to the Colab-only path "/content/test.png", which raises anywhere else;
    # keep it best-effort so the app still runs outside Colab.
    try:
        img.save("/content/test.png")
    except OSError:
        pass

    image = pipe(
        prompt,
        img,
        negative_prompt=negative,
        generator=generator,
        num_inference_steps=steps,
    ).images[0]
    return image
30
# Gradio UI: prompt, image URL, seed, step count, and negative prompt in;
# generated PIL image out.
_inputs = [
    gr.Textbox(placeholder="Prompt"),
    gr.Textbox(placeholder="Image"),
    gr.Number(precision=0, label="seed"),
    gr.Number(precision=0, label="steps", value=20),
    gr.Textbox(
        placeholder="Negative Prompt",
        value="monochrome, lowres, bad anatomy, worst quality, low quality",
    ),
]
demo = gr.Interface(
    fn=generate,
    inputs=_inputs,
    outputs=gr.Image(type="pil"),
    title="test",
)
31
+
32
def _main():
    """Serve the Gradio app, queuing requests so concurrent users share the pipeline safely."""
    demo.queue()
    demo.launch(inline=False, debug=True)


if __name__ == "__main__":
    _main()