benjamin-paine committed · Commit 310a01c · verified · 1 parent: 13b51c6

Update app.py

Files changed (1): app.py (+50, −20)
app.py CHANGED

@@ -21,14 +21,11 @@ from transformers import (
     GemmaTokenizer
 )

+default_system_prompt = "You are an assistant designed to generate superior images with the superior degree of image-text alignment based on textual prompts or user prompts."
 device = "cuda" if torch.cuda.is_available() else "cpu"
 model_repo_id = "Alpha-VLLM/Lumina-Image-2.0"
 transformer_repo_id = "benjamin-paine/Lumina-Image-2.0" # Temporarily fixed, change when main repo gets updated
-
-if torch.cuda.is_available():
-    torch_dtype = torch.bfloat16
-else:
-    torch_dtype = torch.float32
+torch_dtype = torch.float32

 ###
 transformer = Lumina2Transformer2DModel.from_pretrained(transformer_repo_id, subfolder="transformer")
@@ -60,6 +57,9 @@ def infer(
     height=1024,
     guidance_scale=4.0,
     num_inference_steps=30,
+    system_prompt=default_system_prompt,
+    cfg_normalization=True,
+    cfg_trunc_ratio=1.0,
     progress=gr.Progress(track_tqdm=True),
 ):
     if randomize_seed:
@@ -75,6 +75,9 @@ def infer(
         width=width,
         height=height,
         generator=generator,
+        system_prompt=system_prompt,
+        cfg_normalization=cfg_normalization,
+        cfg_trunc_ratio=cfg_trunc_ratio
     ).images[0]

     return image, seed
@@ -98,7 +101,7 @@ with gr.Blocks(css=css) as demo:
         prompt = gr.Text(
             label="Prompt",
             show_label=False,
-            max_lines=1,
+            max_lines=4,
             placeholder="Enter your prompt",
             container=False,
         )
@@ -108,21 +111,29 @@ with gr.Blocks(css=css) as demo:
         result = gr.Image(label="Result", show_label=False)

         with gr.Accordion("Advanced Settings", open=False):
-            negative_prompt = gr.Text(
-                label="Negative prompt",
-                max_lines=1,
-                placeholder="Enter a negative prompt",
-            )
+            with gr.Row():
+                system_prompt = gr.Text(
+                    label="System Prompt",
+                    max_lines=4,
+                    value=default_system_prompt
+                )
+
+                negative_prompt = gr.Text(
+                    label="Negative prompt",
+                    max_lines=4,
+                    placeholder="Enter a negative prompt",
+                )

-            seed = gr.Slider(
-                label="Seed",
-                minimum=0,
-                maximum=MAX_SEED,
-                step=1,
-                value=0,
-            )
-
-            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+            with gr.Row():
+                seed = gr.Slider(
+                    label="Seed",
+                    minimum=0,
+                    maximum=MAX_SEED,
+                    step=1,
+                    value=0,
+                )
+
+                randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

             with gr.Row():
                 width = gr.Slider(
@@ -158,6 +169,22 @@ with gr.Blocks(css=css) as demo:
                 value=30,
             )

+            with gr.Row():
+                cfg_normalization = gr.Checkbox(
+                    label="CFG Normalization",
+                    value=True
+                )
+                cfg_trunc_ratio = gr.Slider(
+                    label="CFG Truncation Ratio",
+                    minimum=0.0,
+                    maximum=1.0,
+                    step=0.01,
+                    value=1.0
+                )
+
+            with gr.Row():
+
+
     gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=True, cache_mode="lazy")

     gr.on(
@@ -172,6 +199,9 @@ with gr.Blocks(css=css) as demo:
             height,
             guidance_scale,
             num_inference_steps,
+            system_prompt,
+            cfg_normalization,
+            cfg_trunc_ratio,
         ],
         outputs=[result, seed],
     )
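
Taken together, the commit pins the pipeline to float32, exposes the Gemma system prompt in the UI, and surfaces two classifier-free guidance controls (cfg_normalization, cfg_trunc_ratio) that are threaded from the sliders through infer into the pipeline call. Below is a minimal sketch of the resulting call path, assuming the current diffusers Lumina2Pipeline API and the two repos referenced above; the example prompt and seed are illustrative, not from the Space.

```python
# Sketch of the call path this commit wires up; assumes diffusers'
# Lumina2Pipeline class. Prompt text and seed are illustrative only.
import torch
from diffusers import Lumina2Pipeline, Lumina2Transformer2DModel

transformer = Lumina2Transformer2DModel.from_pretrained(
    "benjamin-paine/Lumina-Image-2.0", subfolder="transformer"
)
pipe = Lumina2Pipeline.from_pretrained(
    "Alpha-VLLM/Lumina-Image-2.0",
    transformer=transformer,
    torch_dtype=torch.float32,  # the commit drops the bfloat16-on-CUDA branch
).to("cuda" if torch.cuda.is_available() else "cpu")

image = pipe(
    prompt="A photo of an astronaut riding a horse",  # illustrative
    system_prompt=(
        "You are an assistant designed to generate superior images with the "
        "superior degree of image-text alignment based on textual prompts "
        "or user prompts."
    ),
    guidance_scale=4.0,
    num_inference_steps=30,
    width=1024,
    height=1024,
    cfg_normalization=True,  # new UI checkbox, default True
    cfg_trunc_ratio=1.0,     # new UI slider; 1.0 = CFG over the whole trajectory
    generator=torch.Generator(device="cpu").manual_seed(0),
).images[0]
image.save("output.png")
```

Dropping the bfloat16-on-CUDA branch in favor of unconditional float32 is likely a concession to CPU fallback on the Space, where bfloat16 is slow or unsupported; the trade-off is roughly double the memory use when a GPU is available.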
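As for what the two new controls mean, here is a rough, self-contained sketch of the usual CFG truncation and normalization logic: truncation skips guidance over part of the denoising trajectory, and normalization rescales the guided prediction back to the conditional prediction's norm. This is illustrative only, not the pipeline's exact code, and t_fraction is a hypothetical normalized timestep.

```python
import torch

def guided_noise(noise_cond, noise_uncond, guidance_scale,
                 t_fraction, cfg_trunc_ratio=1.0, cfg_normalization=True):
    """Illustrative CFG step, not the library's implementation."""
    if t_fraction >= cfg_trunc_ratio:
        # Truncated region: skip classifier-free guidance entirely.
        return noise_cond
    noise = noise_uncond + guidance_scale * (noise_cond - noise_uncond)
    if cfg_normalization:
        # Rescale so guidance cannot inflate the prediction's magnitude.
        cond_norm = noise_cond.norm(dim=-1, keepdim=True)
        noise = noise * cond_norm / noise.norm(dim=-1, keepdim=True)
    return noise

# Toy usage with random tensors standing in for model outputs.
cond, uncond = torch.randn(2, 16), torch.randn(2, 16)
out = guided_noise(cond, uncond, guidance_scale=4.0, t_fraction=0.3)
```

With the UI defaults (cfg_trunc_ratio=1.0, cfg_normalization=True) the behavior matches ordinary normalized CFG; lowering the ratio trades prompt adherence for speed and, in some cases, fewer guidance artifacts late in sampling.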