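# Gradio demo: text-to-image generation with the SDXL-Lightning 4-step UNet via diffusers.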
import gradio as gr
import numpy as np
import random
from diffusers import StableDiffusionXLPipeline, UNet2DConditionModel, EulerDiscreteScheduler
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file
import torch

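# Prefer GPU with half precision when CUDA is available; fall back to CPU/float32.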
device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Define the base SDXL model and the SDXL-Lightning distilled UNet checkpoint
base_repo_id = "stabilityai/stable-diffusion-xl-base-1.0"
model_repo_id = "ByteDance/SDXL-Lightning"
ckpt = "sdxl_lightning_4step_unet.safetensors"

# Build a UNet from the base model's config and load the Lightning weights into it
# (loading recipe from the SDXL-Lightning model card)
unet = UNet2DConditionModel.from_config(
    UNet2DConditionModel.load_config(base_repo_id, subfolder="unet")
).to(device, torch_dtype)
unet.load_state_dict(load_file(hf_hub_download(model_repo_id, ckpt), device=device))

# Load the diffusion pipeline with the Lightning UNet
pipe = StableDiffusionXLPipeline.from_pretrained(
    base_repo_id, unet=unet, torch_dtype=torch_dtype, variant="fp16"
)
pipe.to(device)

# Update the scheduler: Lightning checkpoints expect trailing timestep spacing
pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")

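# Largest 32-bit integer, used as the upper bound when drawing a random seed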
MAX_SEED = np.iinfo(np.int32).max


def generate_and_display(prompt, num_inference_steps=4, guidance_scale=0):
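    """Generate an image from a text prompt, using a freshly seeded random generator."""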
    generator = torch.Generator(device).manual_seed(random.randint(0, MAX_SEED))
    image = pipe(
        prompt=prompt,
        num_inference_steps=num_inference_steps,
        guidance_scale=guidance_scale,
        generator=generator,
    ).images[0]
    return image


# Build the Gradio interface (gr.Interface is constructed directly, not used as a decorator)
demo = gr.Interface(
    fn=generate_and_display,
    inputs=[
        gr.Textbox(label="Prompt", placeholder="Enter your creative prompt here"),
        # The SDXL-Lightning 4-step checkpoint is tuned for 4 steps and guidance_scale=0
        gr.Slider(1, 50, value=4, step=1, label="Number of Inference Steps"),
        gr.Slider(0.0, 10.0, value=0.0, label="Guidance Scale"),
    ],
    outputs=gr.Image(label="Generated Image"),
    title="Veshon: Your Creative AI Assistant",
    description="Generate stunning visuals effortlessly with cutting-edge technology!",
)


if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=8080)  # Use a specified port for local testing