Update app.py
app.py CHANGED
@@ -138,23 +138,25 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     collected_chunks = []
     collected_messages = []
 
-
-    #for chunk in response:
-    #collected_chunks.append(chunk) # save the event response
-    #chunk_message = chunk['choices'][0]['delta'] # extract the message
-    #collected_messages.append(chunk_message) # save the message
-    #content=chunk["choices"][0].get("delta",{}).get("content")
-    # join method to concatenate the elements of the list
-    # into a single string,
-    # then strip out any empty strings
-
-    for resp in openai.ChatCompletion.create(
+    for chunk in openai.ChatCompletion.create(
         model='gpt-3.5-turbo',
         messages=conversation,
         temperature=0.5,
         stream=True
     ):
-
+
+        #for chunk in response:
+        collected_chunks.append(chunk) # save the event response
+        chunk_message = chunk['choices'][0]['delta'] # extract the message
+        collected_messages.append(chunk_message) # save the message
+
+        content=chunk["choices"][0].get("delta",{}).get("content")
+
+        # join method to concatenate the elements of the list
+        # into a single string,
+        # then strip out any empty strings
+
+        report.append(content)
         result = "".join(report).strip()
         result = result.replace("\n", "")
         res_box.markdown(f'*{result}*')
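For reference, a minimal self-contained sketch of the streaming pattern this commit switches to, using the pre-1.0 openai client and Streamlit as in app.py. The conversation, report, and res_box names mirror the variables in the diff; the placeholder messages, the API-key line, and the "if content is not None" guard (the final streamed chunk carries an empty delta) are assumptions added here for illustration, not part of the committed code.

import openai
import streamlit as st

openai.api_key = "sk-..."  # assumed: the real app configures the key elsewhere (e.g. an env var)

conversation = [{"role": "user", "content": "Summarize the document section."}]  # placeholder messages
res_box = st.empty()   # Streamlit placeholder that is re-rendered as chunks arrive
report = []            # accumulates the text of every streamed delta

for chunk in openai.ChatCompletion.create(
    model='gpt-3.5-turbo',
    messages=conversation,
    temperature=0.5,
    stream=True,       # yield partial responses as they are generated
):
    # each streamed chunk carries a delta; the last one has no "content" key
    content = chunk["choices"][0].get("delta", {}).get("content")
    if content is not None:
        report.append(content)
        result = "".join(report).strip().replace("\n", "")
        res_box.markdown(f'*{result}*')  # overwrite the placeholder with the text so far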