Daemontatox committed on
Commit
fdf8e05
·
verified ·
1 Parent(s): f973326

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -13
app.py CHANGED
@@ -161,9 +161,9 @@ def convert_history_to_cohere_format(history: List[List[str]]) -> List[dict]:
161
  cohere_history = []
162
  for user_msg, assistant_msg in history:
163
  if user_msg:
164
- cohere_history.append({"role": "USER", "message": user_msg})
165
  if assistant_msg:
166
- cohere_history.append({"role": "ASSISTANT", "message": assistant_msg})
167
  return cohere_history
168
 
169
  def chat_response(
@@ -184,22 +184,22 @@ def chat_response(
184
  # Convert history to Cohere format
185
  chat_history = convert_history_to_cohere_format(history)
186
 
187
- # Initialize stream
188
- stream = co.chat_stream(
189
- model='c4ai-aya-expanse-32b',
190
- message=message,
191
- temperature=temperature,
192
- chat_history=chat_history,
193
- prompt_truncation='AUTO',
194
- preamble=system_prompt
195
- )
196
-
197
  # Initialize buffer for streaming
198
  buffer = ""
199
  history = history + [[message, ""]]
200
 
201
  # Process stream
202
  try:
 
 
 
 
 
 
 
 
 
 
203
  for event in stream:
204
  if event.event_type == "text-generation":
205
  buffer += event.text
@@ -207,7 +207,8 @@ def chat_response(
207
  history[-1][1] = formatted_buffer
208
  chat_display = format_chat_history(history)
209
  yield history, chat_display
210
- except cohere.errors.CohereError as e:
 
211
  error_message = f"Error: {str(e)}"
212
  history[-1][1] = error_message
213
  chat_display = format_chat_history(history)
 
161
  cohere_history = []
162
  for user_msg, assistant_msg in history:
163
  if user_msg:
164
+ cohere_history.append({"role": "User", "message": user_msg})
165
  if assistant_msg:
166
+ cohere_history.append({"role": "Chatbot", "message": assistant_msg})
167
  return cohere_history
168
 
169
  def chat_response(
 
184
  # Convert history to Cohere format
185
  chat_history = convert_history_to_cohere_format(history)
186
 
 
 
 
 
 
 
 
 
 
 
187
  # Initialize buffer for streaming
188
  buffer = ""
189
  history = history + [[message, ""]]
190
 
191
  # Process stream
192
  try:
193
+ # Initialize stream
194
+ stream = co.chat_stream(
195
+ model='c4ai-aya-expanse-32b',
196
+ message=message,
197
+ temperature=temperature,
198
+ chat_history=chat_history,
199
+ prompt_truncation='AUTO',
200
+ preamble=system_prompt
201
+ )
202
+
203
  for event in stream:
204
  if event.event_type == "text-generation":
205
  buffer += event.text
 
207
  history[-1][1] = formatted_buffer
208
  chat_display = format_chat_history(history)
209
  yield history, chat_display
210
+
211
+ except Exception as e:
212
  error_message = f"Error: {str(e)}"
213
  history[-1][1] = error_message
214
  chat_display = format_chat_history(history)