salomonsky committed on
Commit
06b8bc2
1 Parent(s): 5d264e2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -63,9 +63,9 @@ async def gen(prompt, basemodel, width, height, scales, steps, seed, upscale_fac
63
 
64
  async def improve_prompt(prompt):
65
  try:
66
- instruction = "Mejora mi prompt para texto a imagen en inglés con estilo, cinematografía, cámaras, atmósfera e iluminación para la mejor calidad, de máximo 200 palabras."
67
  formatted_prompt = f"{instruction}: {prompt}"
68
- response = llm_client.text_generation(formatted_prompt, max_new_tokens=200)
69
  improved_text = response['generated_text'].strip() if 'generated_text' in response else response.strip()
70
 
71
  return improved_text
@@ -106,4 +106,4 @@ with gr.Blocks(css=css, theme="Nymbo/Nymbo_Theme") as demo:
106
 
107
  btn = gr.Button("Generar")
108
  btn.click(fn=gen, inputs=[prompt, basemodel_choice, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model_choice, process_lora], outputs=output_res)
109
- demo.launch()
 
63
 
64
  async def improve_prompt(prompt):
65
  try:
66
+ instruction = "Improve and translate this prompt into English, adding detailed descriptions of style, cinematography, cameras, atmosphere, and lighting for the best quality, up to 200 words."
67
  formatted_prompt = f"{instruction}: {prompt}"
68
+ response = llm_client.text_generation(formatted_prompt, max_new_tokens=300) # Allowing more tokens for detailed description
69
  improved_text = response['generated_text'].strip() if 'generated_text' in response else response.strip()
70
 
71
  return improved_text
 
106
 
107
  btn = gr.Button("Generar")
108
  btn.click(fn=gen, inputs=[prompt, basemodel_choice, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model_choice, process_lora], outputs=output_res)
109
+ demo.launch()