John6666 committed
Commit 1cbe518
Parent(s): 3910b1e

Upload 2 files

Files changed (2):
  1. app.py +22 -29
  2. externalmod.py +27 -0
app.py CHANGED
@@ -1,11 +1,8 @@
 import gradio as gr
-import os
-import sys
-from pathlib import Path
+import os
 from all_models import models
-from externalmod import gr_Interface_load
+from externalmod import gr_Interface_load, save_image, randomize_seed
 from prompt_extend import extend_prompt
-from random import randint
 import asyncio
 from threading import RLock
 lock = RLock()
@@ -32,53 +29,47 @@ def send_it1(inputs, model_choice, neg_input, height, width, steps, cfg, seed):
 
 # https://huggingface.co/docs/api-inference/detailed_parameters
 # https://huggingface.co/docs/huggingface_hub/package_reference/inference_client
-async def infer(model_index, prompt, nprompt="", height=None, width=None, steps=None, cfg=None, seed=-1, timeout=inference_timeout):
-    from pathlib import Path
+async def infer(model_index, prompt, nprompt="", height=0, width=0, steps=0, cfg=0, seed=-1, timeout=inference_timeout):
     kwargs = {}
-    if height is not None and height >= 256: kwargs["height"] = height
-    if width is not None and width >= 256: kwargs["width"] = width
-    if steps is not None and steps >= 1: kwargs["num_inference_steps"] = steps
-    if cfg is not None and cfg > 0: cfg = kwargs["guidance_scale"] = cfg
-    noise = ""
-    if seed >= 0: kwargs["seed"] = seed
-    else:
-        rand = randint(1, 500)
-        for i in range(rand):
-            noise += " "
+    if height > 0: kwargs["height"] = height
+    if width > 0: kwargs["width"] = width
+    if steps > 0: kwargs["num_inference_steps"] = steps
+    if cfg > 0: cfg = kwargs["guidance_scale"] = cfg
+    if seed == -1: kwargs["seed"] = randomize_seed()
+    else: kwargs["seed"] = seed
     task = asyncio.create_task(asyncio.to_thread(models2[model_index].fn,
-                                prompt=f'{prompt} {noise}', negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
+                                prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
     await asyncio.sleep(0)
     try:
         result = await asyncio.wait_for(task, timeout=timeout)
     except asyncio.TimeoutError as e:
         print(e)
-        print(f"Task timed out: {models2[model_index]}")
+        print(f"Task timed out: {models[model_index]}")
         if not task.done(): task.cancel()
         result = None
-        raise Exception(f"Task timed out: {models2[model_index]}")
+        raise Exception(f"Task timed out: {models[model_index]}") from e
     except Exception as e:
         print(e)
         if not task.done(): task.cancel()
         result = None
-        raise Exception(e)
+        raise Exception() from e
     if task.done() and result is not None and not isinstance(result, tuple):
         with lock:
             png_path = "image.png"
-            result.save(png_path)
-            image = str(Path(png_path).resolve())
+            image = save_image(result, png_path, models[model_index], prompt, nprompt, height, width, steps, cfg, seed)
             return image
     return None
 
-def gen_fn(model_index, prompt, nprompt="", height=None, width=None, steps=None, cfg=None, seed=-1):
+def gen_fn(model_index, prompt, nprompt="", height=0, width=0, steps=0, cfg=0, seed=-1):
     try:
         loop = asyncio.new_event_loop()
         result = loop.run_until_complete(infer(model_index, prompt, nprompt,
                                                height, width, steps, cfg, seed, inference_timeout))
     except (Exception, asyncio.CancelledError) as e:
         print(e)
-        print(f"Task aborted: {models2[model_index]}")
+        print(f"Task aborted: {models[model_index]}")
        result = None
-        raise gr.Error(f"Task aborted: {models2[model_index]}, Error: {e}")
+        raise gr.Error(f"Task aborted: {models[model_index]}, Error: {e}")
     finally:
         loop.close()
     return result
@@ -127,8 +118,9 @@ with gr.Blocks(theme='John6666/YntecDark', fill_width=True, css=css) as myface:
                 height = gr.Slider(label="Height", info="If 0, the default value is used.", maximum=1216, step=32, value=0, elem_classes=["gr-box", "gr-input"])
             with gr.Row():
                 steps = gr.Slider(label="Number of inference steps", info="If 0, the default value is used.", maximum=100, step=1, value=0, elem_classes=["gr-box", "gr-input"])
-                cfg = gr.Slider(label="Guidance scale", info="If 0, the default value is used.", maximum=30.0, step=0.1, value=0, elem_classes=["gr-box", "gr-input"])
+                cfg = gr.Slider(label="Guidance scale", info="If 0, the default value is used.", maximum=30.0, step=0.1, value=-1, elem_classes=["gr-box", "gr-input"])
                 seed = gr.Slider(label="Seed", info="Randomize Seed if -1.", minimum=-1, maximum=MAX_SEED, step=1, value=-1, elem_classes=["gr-box", "gr-input"])
+                seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary")
         run = gr.Button("Generate Image", variant="primary", elem_classes="gr-button")
 
         with gr.Row():
@@ -153,8 +145,9 @@ with gr.Blocks(theme='John6666/YntecDark', fill_width=True, css=css) as myface:
             concurrency_limit=None,
             queue=False,
         )
-        use_short.click(short_prompt, inputs=[input_text], outputs=magic1, queue=False)
-        see_prompts.click(text_it1, inputs=[input_text], outputs=magic1, queue=False)
+        use_short.click(short_prompt, inputs=[input_text], outputs=magic1)
+        see_prompts.click(text_it1, inputs=[input_text], outputs=magic1)
+        seed_rand.click(randomize_seed, None, [seed], queue=False)
 
 myface.queue(default_concurrency_limit=200, max_size=200)
 myface.launch(show_api=False, max_threads=400)
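
For context on the control flow these hunks set up: gen_fn creates a fresh event loop per request, and infer wraps the blocking client call in asyncio.to_thread guarded by asyncio.wait_for. Below is a minimal, self-contained sketch of that pattern, not the Space's actual code; slow_call is a hypothetical stand-in for models2[model_index].fn, and the timeout values are illustrative.

# Sketch: run a blocking call in a worker thread, enforce a timeout,
# and drive it from a synchronous handler through a short-lived loop.
import asyncio
import time

def slow_call(prompt: str) -> str:
    time.sleep(2)  # simulates a slow Inference API request (hypothetical)
    return f"result for {prompt!r}"

async def infer_with_timeout(prompt: str, timeout: float) -> str:
    # Off-load the blocking call so the loop can time it out.
    task = asyncio.create_task(asyncio.to_thread(slow_call, prompt))
    try:
        return await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        if not task.done():
            task.cancel()  # best effort; the worker thread keeps running
        raise

def gen(prompt: str) -> str:
    loop = asyncio.new_event_loop()  # one loop per call, as in the commit
    try:
        return loop.run_until_complete(infer_with_timeout(prompt, timeout=3.0))
    finally:
        loop.close()

print(gen("a cat in a hat"))

Note that task.cancel() here, as in the commit, only cancels the awaiting task; asyncio.to_thread cannot interrupt the underlying thread, so the timeout mainly serves to unblock the Gradio handler.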
externalmod.py CHANGED
@@ -583,3 +583,30 @@ def find_model_list(author: str="", tags: list[str]=[], not_tag="", sort: str="l
         models.append(model.id)
         if len(models) == limit: break
     return models
+
+
+def save_image(image, savefile, modelname, prompt, nprompt, height=0, width=0, steps=0, cfg=0, seed=-1):
+    from PIL import Image, PngImagePlugin
+    import json
+    try:
+        metadata = {"prompt": prompt, "negative_prompt": nprompt, "Model": {"Model": modelname.split("/")[-1]}}
+        if steps > 0: metadata["num_inference_steps"] = steps
+        if cfg > 0: metadata["guidance_scale"] = cfg
+        if seed != -1: metadata["seed"] = seed
+        if width > 0 and height > 0: metadata["resolution"] = f"{width} x {height}"
+        metadata_str = json.dumps(metadata)
+        info = PngImagePlugin.PngInfo()
+        info.add_text("metadata", metadata_str)
+        image.save(savefile, "PNG", pnginfo=info)
+        return str(Path(savefile).resolve())
+    except Exception as e:
+        print(f"Failed to save image file: {e}")
+        raise Exception(f"Failed to save image file:") from e
+
+
+def randomize_seed():
+    from random import seed, randint
+    MAX_SEED = 2**32-1
+    seed()
+    rseed = randint(0, MAX_SEED)
+    return rseed
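
The new save_image stores the generation parameters as a JSON string in a PNG tEXt chunk. A small sketch of how that payload can be read back with Pillow, assuming an image.png produced by the code above (Pillow exposes PNG text chunks through a loaded image's .text mapping):

# Sketch: recover and parse the metadata save_image embeds in the PNG.
import json
from PIL import Image

with Image.open("image.png") as im:
    raw = im.text.get("metadata")  # same key as info.add_text("metadata", ...)

if raw is not None:
    meta = json.loads(raw)
    print(meta.get("prompt"), meta.get("seed"), meta.get("resolution"))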