added input_prompt outside of conditions
app.py CHANGED
@@ -151,9 +151,11 @@ def generation(message, history):
     if type(hist[0]) == tuple:
         image_path = hist[0][0]
 
+        input_prompt = message if isinstance(message, str) else message.get("text", "")
+
         if image_path is None:
             chat_mode["mode"] = "text"
-            input_prompt = message if isinstance(message, str) else message.get("text", "")
+            # input_prompt = message if isinstance(message, str) else message.get("text", "")
             client = openai.OpenAI(api_key=API_KEY)
             stream = client.chat.completions.create(
                 model="gpt-3.5-turbo",
@@ -164,6 +166,7 @@ def generation(message, history):
             return stream
         else:
             chat_mode["mode"] = "image"
+            # input_prompt = message if isinstance(message, str) else message.get("text", "")
             base64_image = encode_image(image_path=image_path)
             client = openai.OpenAI(api_key=API_KEY)
             stream = client.chat.completions.create(
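Both hunks cut off before the messages= argument, so the payload that actually consumes input_prompt is not shown in this commit. Below is a minimal sketch of how the hoisted input_prompt and the encoded image would typically be wired into the two client.chat.completions.create calls, assuming the standard OpenAI chat-completions payload format; the vision model name, the encode_image body, and the API-key handling are illustrative stand-ins, not what app.py actually contains.

# Illustrative sketch only: the real messages= payload is elided from the diff above.
import base64
import openai

API_KEY = "sk-..."  # placeholder; app.py loads its own key

def encode_image(image_path):
    # assumed helper: base64-encode the image file for the vision payload
    with open(image_path, "rb") as f:
        return base64.b64encode(f.read()).decode("utf-8")

def generation_sketch(message, image_path=None):
    client = openai.OpenAI(api_key=API_KEY)
    # hoisted out of the branches, as in this commit
    input_prompt = message if isinstance(message, str) else message.get("text", "")

    if image_path is None:
        # text-only branch
        stream = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": input_prompt}],
            stream=True,
        )
    else:
        # image branch: send the prompt together with the base64-encoded image
        base64_image = encode_image(image_path=image_path)
        stream = client.chat.completions.create(
            model="gpt-4o-mini",  # assumed vision-capable model; app.py's choice is not shown
            messages=[{
                "role": "user",
                "content": [
                    {"type": "text", "text": input_prompt},
                    {"type": "image_url",
                     "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}},
                ],
            }],
            stream=True,
        )
    return stream

With the assignment hoisted above the image_path check, both the text and image branches read the same input_prompt; the commented-out copies added inside each branch simply preserve the old per-branch assignment that this commit replaces.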