Update app.py
app.py CHANGED
@@ -121,7 +121,6 @@ def read_file_content(file,max_length):
     else:
         return ""
 
-
 def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     model = model_choice
     conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
@@ -129,7 +128,6 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     if len(document_section)>0:
         conversation.append({'role': 'assistant', 'content': document_section})
 
-
     # iterate through the stream of events
     start_time = time.time()
     response = openai.ChatCompletion.create(
@@ -147,13 +145,11 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
         collected_messages.append(chunk_message)  # save the message
         content=chunk["choices"][0].get("delta",{}).get("content")
         st.write(f'*{content}*')
-    st.
+    st.write(f"Full response received {chunk_time:.2f} seconds after request")
     full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
-    st.
+    st.write(f"Full conversation received: {full_reply_content}")
     return full_reply_content
 
-
-
 def chat_with_file_contents(prompt, file_content, model_choice='gpt-3.5-turbo'):
     conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
     conversation.append({'role': 'user', 'content': prompt})
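
For context, a minimal sketch of how the streaming part of chat_with_model reads after this commit, assuming the surrounding code follows the usual openai.ChatCompletion streaming pattern; stream=True, chunk_time, and chunk_message are assumptions, since neither the loop header nor the chunk_time assignment is visible in these hunks.

# Hypothetical reconstruction for review context only; names outside the
# visible hunks (stream=True, chunk_time, chunk_message) are assumptions.
import time
import openai
import streamlit as st

def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.append({'role': 'user', 'content': prompt})
    if len(document_section) > 0:
        conversation.append({'role': 'assistant', 'content': document_section})

    start_time = time.time()
    response = openai.ChatCompletion.create(
        model=model_choice,
        messages=conversation,
        stream=True,  # assumed: tokens arrive incrementally
    )

    collected_messages = []
    for chunk in response:
        chunk_time = time.time() - start_time        # assumed: elapsed time per chunk
        chunk_message = chunk['choices'][0]['delta']  # assumed: delta payload
        collected_messages.append(chunk_message)      # save the message
        content = chunk["choices"][0].get("delta", {}).get("content")
        st.write(f'*{content}*')                      # echo each streamed piece

    # the two lines completed by this commit:
    st.write(f"Full response received {chunk_time:.2f} seconds after request")
    full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
    st.write(f"Full conversation received: {full_reply_content}")
    return full_reply_content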