import gradio as gr
import os
import requests
import random
import time
from transformers import pipeline

# Text-generation pipeline that expands a short user prompt into a full
# Stable-Diffusion-style art prompt.
pipe = pipeline(
    "text-generation",
    model="Ar4ikov/gpt2-650k-stable-diffusion-prompt-generator",
    tokenizer="gpt2",
)

# Per-process history of {"prompt": ..., "generated_text": ...} records.
history = []


def generate_text(prompt):
    """Expand *prompt* with the GPT-2 prompt generator and record it.

    Args:
        prompt: Seed text typed by the user.

    Returns:
        The generated prompt string (max_length=77 tokens — the classic
        Stable Diffusion prompt limit).
    """
    generated_text = pipe(prompt, max_length=77)[0]["generated_text"]
    history.append({"prompt": prompt, "generated_text": generated_text})
    return generated_text


# Text-in / text-out interface around the prompt generator.
# NOTE(review): this interface is built but never launched below — only the
# Blocks app in ac() is launched; presumably iface is mounted elsewhere or
# was meant to be combined. Kept as-is.
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=5, label="Prompt"),
    outputs=gr.Textbox(label="Output", show_copy_button=True),
    title="AI Art Prompt Generator",
    description=(
        "Art Prompt Generator is a user-friendly interface designed to optimize input for AI Art Generator or Creator. \n"
        "For faster generation speeds, it's recommended to load the model locally with GPUs, as the online demo at Hugging Face Spaces utilizes CPU, resulting in slower processing times."
    ),
    api_name="predict",
)

name2 = "stabilityai/stable-diffusion-xl-base-1.0"

# Pool of 20 independently loaded handles to the same SDXL model; im_fn()
# picks one at random per call (random.randint(0, 19) below).  Was 20
# copy-pasted identical lines — a comprehension builds the same pool.
models = [gr.Interface.load(f"models/{name2}") for _ in range(20)]

# o = os.getenv("P")
o = "V"  # expected value of the hidden gate textbox `h`; see im_fn()

# HTML snippets swapped into the status areas: `m_out` when the 120 s timer
# expires, `loading` while a run is in flight.  Contents are bare newlines.
m_out = ("""

""")
loading = ("""
""")


def ac():
    """Build and launch the random-SDXL image Blocks app.

    Pressing "Run" generates four images in sequence: each image's
    ``change`` event feeds the next ``im_fn`` call, while a once-per-second
    timer (``t_state``/``t_switch``) cancels the whole chain 120 seconds
    after the run started.
    """

    def clear():
        # Reset the timer switch and the spacing accumulator.
        return gr.update(value=0), gr.update(value=0)

    def start():
        # Arm the timer: remember the start timestamp, clear the switch.
        stamp = time.time()
        return gr.update(value=stamp), gr.update(value=0)

    def end(stamp):
        # Fired every second; 120 s after `stamp`, flip the switch and
        # reveal the timeout message.
        ts = stamp + 120
        ti = time.time()
        if ti > ts and stamp != 0:
            return gr.update(value=1), gr.HTML.update(f"{m_out}", visible=True)
        else:
            return gr.update(value=0), None

    def im_fn(put, fac="", h=None):
        # Generate one image from a randomly chosen pool entry.  `fac`
        # grows by one space per call so the prompt passed downstream
        # always differs, keeping the out*.change chain firing.
        try:
            if h == o:
                put = f"{put}{fac}"
                fac = f"{fac} "
                rn = random.randint(0, 19)
                model = models[rn]
                return model(put), fac
            elif h != o:
                # Gate value mismatch: produce nothing.
                return (None, None)
        except Exception:
            # Best-effort: a failed remote call just yields no image.
            return None, None

    def cl_fac():
        # Reset the accumulator and show the loading snippet.
        return "", gr.HTML.update(f"{loading}")

    with gr.Blocks() as b:
        with gr.Row():
            with gr.Column():
                put = gr.Textbox()
            with gr.Column():
                with gr.Row():
                    btn1 = gr.Button("Run")
                    btn2 = gr.Button("Clear")
        message = gr.HTML("\n")
        message2 = gr.HTML("", visible=False)
        with gr.Row():
            out1 = gr.Image()
            out2 = gr.Image()
        with gr.Row():
            out3 = gr.Image()
            out4 = gr.Image()
        # Hidden plumbing: gate value, timer start stamp, timeout switch.
        with gr.Row(visible=False):
            h = gr.Textbox(value="V")
            t_state = gr.Number()
            t_switch = gr.Textbox(value=0)

        def clear_all():
            # Wipe every visible component and stop the chain (t_switch=1).
            return "", None, None, None, None, None, None, 1, gr.HTML.update("\n")

        fac_b = gr.Textbox(value="", visible=False)

        def noth():
            return gr.HTML.update("\n")

        # a1 = btn1.click(noth, None, btn1, every=1)
        btn1.click(cl_fac, None, [fac_b, message], show_progress=False)
        b1 = btn1.click(start, None, [t_state, t_switch], show_progress=True)
        sta = t_state.change(end, t_state, [t_switch, message2], every=1, show_progress=True)
        # Image chain: Run fills out1; each out*.change triggers the next.
        b2 = btn1.click(im_fn, [put, fac_b, h], [out1, fac_b], show_progress=True)
        b3 = out1.change(im_fn, [put, fac_b, h], [out2, fac_b], show_progress=True)
        b4 = out2.change(im_fn, [put, fac_b, h], [out3, fac_b], show_progress=True)
        b5 = out3.change(im_fn, [put, fac_b, h], [out4, fac_b], show_progress=True)
        b6 = out4.change(noth, None, message, show_progress=False)
        # Timeout: switch flip cancels the timer and the remaining chain.
        swi = t_switch.change(clear, None, [t_switch, fac_b], cancels=[sta, b2, b3, b4, b5], show_progress=False)
        # btn2.click(noth, None, message, cancels=[b1, sta, b2, b3, b4, b5, swi], show_progress=False)
        btn2.click(
            clear_all,
            None,
            [fac_b, put, out1, out2, out3, out4, t_state, t_switch, message],
            cancels=[b1, sta, b2, b3, b4, b5, swi],
            show_progress=False,
        )
    b.queue(concurrency_count=100).launch(show_api=False)


ac()