import gradio as gr
import torch
from diffusers import DiffusionPipeline, StableDiffusionLatentUpscalePipeline
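
# Prefer the GPU when one is available; the fp16 pipelines below are intended for CUDA.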
device = "cuda" if torch.cuda.is_available() else "cpu"
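
# Load the Dreamlike Photoreal 2.0 base pipeline and Stability AI's SD x2 latent
# upscaler in half precision, then move both onto the selected device.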
pipe = DiffusionPipeline.from_pretrained("dreamlike-art/dreamlike-photoreal-2.0", torch_dtype=torch.float16, safety_checker=None)
upscaler = StableDiffusionLatentUpscalePipeline.from_pretrained("stabilityai/sd-x2-latent-upscaler", torch_dtype=torch.float16)
upscaler = upscaler.to(device)
pipe = pipe.to(device)
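

# Generate an image from the UI inputs; optionally refine it with the 2x latent upscaler.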
def genie(prompt, negative_prompt, height, width, scale, steps, seed, upscale):
    # Seed the generator so the same inputs reproduce the same image.
    generator = torch.Generator(device=device).manual_seed(seed)
    if upscale == "Yes":
        # Keep the base pass in latent space and hand the latents to the upscaler,
        # which decodes the final image at twice the requested resolution.
        low_res_latents = pipe(prompt, negative_prompt=negative_prompt, height=height, width=width, num_inference_steps=steps, guidance_scale=scale, generator=generator, output_type="latent").images
        image = upscaler(prompt, negative_prompt=negative_prompt, image=low_res_latents, num_inference_steps=5, guidance_scale=0, generator=generator).images[0]
    else:
        image = pipe(prompt, negative_prompt=negative_prompt, height=height, width=width, num_inference_steps=steps, guidance_scale=scale, generator=generator).images[0]
    return image
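

# Gradio UI: the input components below map positionally onto genie's parameters.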
gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 77 Token Limit.'),
                               gr.Textbox(label='What you Do Not want the AI to generate. 77 Token Limit.'),
                               gr.Slider(512, 1024, 768, step=128, label='Height'),
                               gr.Slider(512, 1024, 768, step=128, label='Width'),
                               gr.Slider(1, maximum=15, value=10, step=.25, label='Guidance Scale'),
                               gr.Slider(25, maximum=100, value=50, step=25, label='Number of Inference Steps'),
                               gr.Slider(minimum=1, step=1, maximum=9999999999999999, randomize=True, label='Seed'),
                               gr.Radio(["Yes", "No"], label='Upscale?'),
                               ],
             outputs=gr.Image(label='Generated Image'),
             title="PhotoReal V2 with SD x2 Upscaler - GPU",
             description="<br><br><b>Warning: This Demo is capable of producing NSFW content.</b>",
             article="Code Monkey: <a href=\"https://huggingface.co/Manjushri\">Manjushri</a>").launch(debug=True)