from pathlib import Path
from num2words import num2words
import numpy as np
import os
import random
import re
import torch
import json
from shapely.geometry.polygon import Polygon
from shapely.affinity import scale
from PIL import Image, ImageDraw, ImageOps, ImageFilter, ImageFont, ImageColor
# Install the pinned Gradio version at runtime before importing it.
os.system('pip install gradio==2.7.5')
import gradio as gr
from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM
finetuned = AutoModelForCausalLM.from_pretrained('model')
tokenizer = AutoTokenizer.from_pretrained('gpt2')
device = "cuda:0" if torch.cuda.is_available() else "cpu"
print(device)
finetuned = finetuned.to(device)
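# The fine-tuned model continues a "[User prompt] ... [Layout]" prefix with a comma-separated
# list of rooms and their polygon corners, e.g. "bedroom: (x,y)(x,y)..., bathroom: (x,y)..."
# (format inferred from the parsing in prompt_to_layout below).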
# Utility functions
def containsNumber(value):
    """Return True if the string contains at least one digit character."""
    for character in value:
        if character.isdigit():
            return True
    return False
def creativity(intensity):
    """Map the 'Creativity' radio choice to nucleus/top-k sampling parameters."""
    if intensity == 'Low':
        top_p = 0.95
        top_k = 10
    elif intensity == 'Medium':
        top_p = 0.9
        top_k = 50
    else:  # 'High'
        top_p = 0.85
        top_k = 100
    return top_p, top_k
housegan_labels = {"living_room": 1, "kitchen": 2, "bedroom": 3, "bathroom": 4, "missing": 5, "closet": 6,
"balcony": 7, "corridor": 8, "dining_room": 9, "laundry_room": 10}
architext_colors = [[0, 0, 0], [249, 222, 182], [195, 209, 217], [250, 120, 128], [126, 202, 234], [190, 0, 198], [255, 255, 255],
[6, 53, 17], [17, 33, 58], [132, 151, 246], [197, 203, 159], [6, 53, 17],]
# Matches every "(...)" group in a coordinate string, capturing the "x,y" pair inside.
regex = re.compile(r".*?\((.*?)\)")
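# e.g. re.findall(regex, "(194,91)(135,91)") -> ['194,91', '135,91']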
def draw_polygons(polygons, colors, im_size=(512, 512), b_color="white", fpath=None):
    """Render each room polygon: a black outline, then a slightly shrunken fill in the
    room's color. Returns the ImageDraw handle and the rendered image."""
    image = Image.new("RGBA", im_size, color="white")
    draw = ImageDraw.Draw(image)
    for poly, color in zip(polygons, colors):
        # get initial polygon coordinates
        xy = poly.exterior.xy
        coords = np.dstack((xy[1], xy[0])).flatten()
        # draw it in black; this becomes the outline once the inner fill is drawn on top
        draw.polygon(list(coords), fill=(0, 0, 0))
        # get inner (shrunken) polygon coordinates
        small_poly = poly.buffer(-1, resolution=32, cap_style=2, join_style=2, mitre_limit=5.0)
        if small_poly.geom_type == 'MultiPolygon':
            mycoordslist = [list(x.exterior.coords) for x in small_poly.geoms]
            for coord in mycoordslist:
                coords = np.dstack((np.array(coord)[:, 1], np.array(coord)[:, 0])).flatten()
                draw.polygon(list(coords), fill=tuple(color))
        elif poly.geom_type == 'Polygon':
            # get inner polygon coordinates
            xy2 = small_poly.exterior.xy
            coords2 = np.dstack((xy2[1], xy2[0])).flatten()
            # draw it on canvas with the room's color
            draw.polygon(list(coords2), fill=tuple(color))
    image = image.transpose(Image.FLIP_TOP_BOTTOM)
    if fpath:
        image.save(fpath, quality=100, subsampling=0)
    return draw, image
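# Minimal sketch of calling the renderer directly (assumes any list of shapely Polygons
# plus matching RGB colors); uncomment to try outside the app:
# _, demo = draw_polygons([Polygon([(10, 10), (10, 100), (100, 100), (100, 10)])], [[250, 120, 128]])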
def prompt_to_layout(user_prompt, intensity, fpath=None):
    """Generate a layout from a natural-language prompt and render it.
    Returns the rendered image (with the label legend appended below) and the room
    coordinates as a JSON string."""
    # The model expects numbers spelled out, so convert any digits to words first.
    if containsNumber(user_prompt):
        spaced_prompt = user_prompt.split(' ')
        new_prompt = ' '.join([word if not word.isdigit() else num2words(int(word)).lower() for word in spaced_prompt])
        model_prompt = '[User prompt] {} [Layout]'.format(new_prompt)
    else:
        model_prompt = '[User prompt] {} [Layout]'.format(user_prompt)
    top_p, top_k = creativity(intensity)
    input_ids = tokenizer(model_prompt, return_tensors='pt').to(device)
    output = finetuned.generate(**input_ids, do_sample=True, top_p=top_p, top_k=top_k,
                                eos_token_id=50256, max_length=400)
    output = tokenizer.batch_decode(output, skip_special_tokens=True)
    # Parse the generated "[Layout] room:(x,y)(x,y)..., room:(x,y)..." text.
    layout = output[0].split('[User prompt]')[1].split('[Layout] ')[1].split(', ')
    spaces = [txt.split(':')[0] for txt in layout]
    coords = [txt.split(':')[1].rstrip() for txt in layout]
    coordinates = [re.findall(regex, coord) for coord in coords]
    # Convert the raw integer coordinates to scaled floats for the JSON output.
    num_coords = []
    for coord in coordinates:
        temp = []
        for xy in coord:
            numbers = xy.split(',')
            temp.append(tuple([int(num) / 14.2 for num in numbers]))
        num_coords.append(temp)
    # Disambiguate repeated room names (bedroom -> bedroom1, bedroom2, ...) for the JSON keys.
    new_spaces = []
    for i, v in enumerate(spaces):
        totalcount = spaces.count(v)
        count = spaces[:i].count(v)
        new_spaces.append(v + str(count + 1) if totalcount > 1 else v)
    out_dict = dict(zip(new_spaces, num_coords))
    out_dict = json.dumps(out_dict)
    # Build shapely polygons from the raw coordinates and scale them up to the 512px canvas.
    polygons = []
    for coord in coordinates:
        polygons.append([point.split(',') for point in coord])
    geom = []
    for poly in polygons:
        geom.append(scale(Polygon(np.array(poly, dtype=int)), xfact=2, yfact=2, origin=(0, 0)))
    colors = [architext_colors[housegan_labels[space]] for space in spaces]
    _, im = draw_polygons(geom, colors, fpath=fpath)
    # Append the color legend below the rendered layout.
    legend = Image.open("labels.png")
    imgs_comb = np.vstack([im, legend])
    imgs_comb = Image.fromarray(imgs_comb)
    return imgs_comb, out_dict
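# A minimal usage sketch (assumes the local 'model' directory and 'labels.png' referenced
# above are present); uncomment to test generation without launching the Gradio interface:
# preview, coords_json = prompt_to_layout('an apartment with two bedrooms and one bathroom', 'Medium')
# preview.save('preview_layout.png')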
# Gradio App
custom_css="""
@import url("https://use.typekit.net/nid3pfr.css");
.gradio_wrapper .gradio_bg[is_embedded=false] {
    min-height: 80%;
}
.gradio_wrapper .gradio_bg[is_embedded=false] .gradio_page {
    display: flex;
    width: 100vw;
    min-height: 50vh;
    flex-direction: column;
    justify-content: center;
    align-items: center;
    margin: 0px;
    max-width: 100vw;
    background: #FFFFFF;
}
.gradio_wrapper .gradio_bg[is_embedded=false] .gradio_page .content {
    padding: 0px;
    margin: 0px;
}
.gradio_interface {
    width: 100vw;
    max-width: 1500px;
}
.gradio_interface .panel:nth-child(2) .component:nth-child(3) {
    display: none;
}
.gradio_wrapper .gradio_bg[theme=default] .panel_buttons {
    justify-content: flex-end;
}
.gradio_wrapper .gradio_bg[theme=default] .panel_button {
    flex: 0 0 0;
    min-width: 150px;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .panel_button.submit {
    background: #11213A;
    border-radius: 5px;
    color: #FFFFFF;
    text-transform: uppercase;
    min-width: 150px;
    height: 4em;
    letter-spacing: 0.15em;
    flex: 0 0 0;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .panel_button.submit:hover {
    background: #000000;
}
.input_text:focus {
    border-color: #FA7880;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .input_text input,
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .input_text textarea {
    font: 200 45px garamond-premier-pro-display, serif;
    line-height: 110%;
    color: #11213A;
    border-radius: 5px;
    padding: 15px;
    border: none;
    background: #F2F4F4;
}
.input_text textarea:focus-visible {
    outline: none;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .input_radio .radio_item.selected {
    background-color: #11213A;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .input_radio .selected .radio_circle {
    border-color: #4365c4;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .output_image {
    width: 100%;
    height: 40vw;
    max-height: 630px;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .output_image .image_preview_holder {
    background: transparent;
}
.panel:nth-child(1) {
    margin-left: 50px;
    margin-right: 50px;
    margin-bottom: 80px;
    max-width: 750px;
}
.panel {
    background: transparent;
}
.gradio_wrapper .gradio_bg[theme=default] .gradio_interface .component_set {
    background: transparent;
    box-shadow: none;
}
.panel:nth-child(2) .gradio_wrapper .gradio_bg[theme=default] .gradio_interface .panel_header {
    display: none;
}
.gradio_wrapper .gradio_bg[is_embedded=false] .gradio_page .footer {
    transform: scale(0.75);
    filter: grayscale(1);
}
.labels {
    height: 20px;
    width: auto;
}
@media (max-width: 1000px) {
    .panel:nth-child(1) {
        margin-left: 0px;
        margin-right: 0px;
    }
    .gradio_wrapper .gradio_bg[theme=default] .gradio_interface .output_image {
        height: auto;
    }
}
"""
creative_slider = gr.inputs.Radio(["Low", "Medium", "High"], default="Low", label='Creativity')
textbox = gr.inputs.Textbox(placeholder='An apartment with two bedrooms and one bathroom', lines=3,
                            label="DESCRIBE YOUR IDEAL APARTMENT")
generated = gr.outputs.Image(label='Generated Layout')
layout = gr.outputs.Textbox(label='Layout Coordinates')
iface = gr.Interface(fn=prompt_to_layout, inputs=[textbox, creative_slider],
                     outputs=[generated, layout],
                     css=custom_css,
                     theme="default",
                     allow_flagging='never',
                     allow_screenshot=False,
                     thumbnail="thumbnail_gradio.PNG")
iface.launch(enable_queue=True)