import logging

import torch
from diffusers import DiffusionPipeline, LCMScheduler, UNet2DConditionModel
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Run on GPU when available, otherwise fall back to CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Load the LCM-distilled UNet for StableMaterials
unet = UNet2DConditionModel.from_pretrained(
    "gvecchio/StableMaterials",
    subfolder="unet_lcm",
    torch_dtype=torch.float16,
)

# Load the full StableMaterials pipeline (custom code) with the LCM UNet
pipe = DiffusionPipeline.from_pretrained(
    "gvecchio/StableMaterials",
    trust_remote_code=True,
    unet=unet,
    torch_dtype=torch.float16,
).to(device)

# Swap in the LCM scheduler to enable few-step sampling
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
def generate_material(prompt, seed=-1, resolution=512, refinement=False):
    try:
        # Use the provided seed, or draw a random one when seed is -1
        seed = seed if seed != -1 else torch.randint(0, 10000, (1,)).item()
        logger.info(f"Generating images for prompt: {prompt} with seed: {seed}")

        generator = torch.Generator(device=pipe.device).manual_seed(seed)

        # Generate a tileable material with 4 LCM inference steps
        image = pipe(
            prompt=[prompt],
            tileable=True,
            num_images_per_prompt=1,
            num_inference_steps=4,
            generator=generator,
        ).images[0]

        # Resize the output to the requested resolution
        image = image.resize((resolution, resolution))

        if refinement:
            # Refinement is not implemented in this version
            pass

        return image
    except Exception as e:
        logger.error(f"Exception occurred while generating images: {e}")
        raise
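

# Minimal local-test sketch (an assumption, not part of this file: the Space
# presumably wires generate_material into a UI such as Gradio, which is not
# shown here). The exact output type depends on the custom StableMaterials
# pipeline; this only calls the function and inspects the result.
if __name__ == "__main__":
    material = generate_material("weathered red brick wall", seed=42)
    print(type(material), getattr(material, "size", None))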