Update app.py
app.py
CHANGED
@@ -162,20 +162,13 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
         collected_chunks.append(chunk)  # save the event response
         chunk_message = chunk['choices'][0]['delta']  # extract the message
         collected_messages.append(chunk_message)  # save the message
-
-
-    # print the time delay and text received
-    print(f"Full response received {chunk_time:.2f} seconds after request")
+        st.write(f"Message received {chunk_time:.2f} seconds after request: {chunk_message}")  # print the delay and text
+    st.write(f"Full response received {chunk_time:.2f} seconds after request")
     full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
-
-
-
-
-
-
+    st.write(f"Full conversation received: {full_reply_content}")
     #return response
     #return response['choices'][0]['message']['content']
-    return
+    return full_reply_content
 
 
 def chat_with_file_contents(prompt, file_content, model_choice='gpt-3.5-turbo'):
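For context, here is a minimal sketch of how the updated streaming loop could sit inside chat_with_model after this change. It assumes the pre-1.0 openai Python SDK (which the chunk['choices'][0]['delta'] access pattern suggests), Streamlit imported as st, and a simple messages list built from prompt and document_section; the message construction, start_time bookkeeping, and ChatCompletion.create call all fall outside the hunk above and are assumptions, not the author's exact code.

import time

import openai
import streamlit as st


def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
    # Assumed message construction: the real prompt/section formatting is outside this hunk.
    messages = [
        {"role": "user", "content": f"{prompt}\n\n{document_section}"},
    ]

    start_time = time.time()
    # Pre-1.0 openai SDK streaming call; stream=True yields completion chunks as they arrive.
    response = openai.ChatCompletion.create(
        model=model_choice,
        messages=messages,
        stream=True,
    )

    collected_chunks = []    # raw streamed chunks
    collected_messages = []  # just the delta payloads
    for chunk in response:
        chunk_time = time.time() - start_time         # seconds since the request was sent
        collected_chunks.append(chunk)                # save the event response
        chunk_message = chunk['choices'][0]['delta']  # extract the message
        collected_messages.append(chunk_message)      # save the message
        st.write(f"Message received {chunk_time:.2f} seconds after request: {chunk_message}")

    st.write(f"Full response received {chunk_time:.2f} seconds after request")
    full_reply_content = ''.join([m.get('content', '') for m in collected_messages])
    st.write(f"Full conversation received: {full_reply_content}")
    return full_reply_content

Returning full_reply_content (instead of the bare return that was removed) lets the Streamlit caller reuse the assembled reply, e.g. to store it in conversation history, rather than only seeing it via st.write.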