bharatcoder
committed
increase max tokens for prompt.
app.py
CHANGED
@@ -62,7 +62,7 @@ def expand_idea(promptgen_model, idea_text):
                 'content': f'For the given idea, generate a text prompt to generate an image from a text to image generator. Be creative and include both subject and style prompts into one. Do not, explain your decisions. Idea: {idea_text}',
             },
         ],
-        max_tokens=
+        max_tokens=100,
         temperature=1.1, # Set temperature higher for dynamic responses
         top_p=0.9,
     )
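For context, here is a minimal sketch of what the surrounding call in expand_idea likely looks like after this change. It assumes an OpenAI-compatible chat-completions client; the client setup, the use of promptgen_model as the model argument, the 'role': 'user' field, and the return line are assumptions, while the prompt string, max_tokens=100, temperature, and top_p come from the diff above.

```python
# Sketch only: assumes an OpenAI-compatible client; not the repository's exact code.
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment


def expand_idea(promptgen_model, idea_text):
    response = client.chat.completions.create(
        model=promptgen_model,  # assumed: the model name is passed straight through
        messages=[
            {
                'role': 'user',  # assumed role
                'content': f'For the given idea, generate a text prompt to generate an image from a text to image generator. Be creative and include both subject and style prompts into one. Do not, explain your decisions. Idea: {idea_text}',
            },
        ],
        max_tokens=100,  # raised in this commit so longer generated prompts are not cut off
        temperature=1.1, # Set temperature higher for dynamic responses
        top_p=0.9,
    )
    return response.choices[0].message.content  # assumed return value
```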