Ganesh Chintalapati committed
Commit 74a52a0 · 1 Parent(s): 1d36e10

gpt-4o-mini streaming error fix

Files changed (1)
  1. app.py (+36 -11)
app.py CHANGED
@@ -24,6 +24,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
         yield "Error: OpenAI API key not provided."
         return

+    # Build message history
     messages = []
     for msg in history:
         messages.append({"role": "user", "content": msg["user"]})
@@ -48,6 +49,7 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
                 response.raise_for_status()
                 async for chunk in response.aiter_text():
                     if chunk:
+                        # Parse the streaming chunk (JSON lines)
                         lines = chunk.splitlines()
                         for line in lines:
                             if line.startswith("data: "):
@@ -57,19 +59,25 @@ async def ask_openai(query: str, history: List[Dict[str, str]]) -> AsyncGenerato
                                 if not data.strip():
                                     continue
                                 try:
-                                    json_data = json.loads(data)
+                                    json_data = json.loads(data)  # Safely parse JSON
                                     if "choices" in json_data and json_data["choices"]:
                                         delta = json_data["choices"][0].get("delta", {})
                                         if "content" in delta and delta["content"] is not None:
                                             yield delta["content"]
                                 except json.JSONDecodeError as e:
+                                    logger.error(f"Error parsing OpenAI stream chunk: {str(e)} - Data: {data}")
                                     yield f"Error parsing stream: {str(e)}"
                                 except Exception as e:
+                                    logger.error(f"Unexpected error in OpenAI stream: {str(e)} - Data: {data}")
                                     yield f"Error in stream: {str(e)}"

     except httpx.HTTPStatusError as e:
-        yield f"Error: OpenAI HTTP Status Error: {e.response.status_code}"
+        # Read the response body for streaming responses
+        response_text = await e.response.aread()
+        logger.error(f"OpenAI HTTP Status Error: {e.response.status_code}, {response_text}")
+        yield f"Error: OpenAI HTTP Status Error: {e.response.status_code}, {response_text.decode('utf-8')}"
     except Exception as e:
+        logger.error(f"OpenAI Error: {str(e)}")
         yield f"Error: OpenAI Error: {str(e)}"

 async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
@@ -78,6 +86,7 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> str:
         logger.error("Anthropic API key not provided")
         return "Error: Anthropic API key not provided."

+    # Build message history
     messages = []
     for msg in history:
         messages.append({"role": "user", "content": msg["user"]})
@@ -119,6 +128,7 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
         logger.error("Gemini API key not provided")
         return "Error: Gemini API key not provided."

+    # Gemini doesn't natively support chat history in the same way, so we concatenate history as text
     history_text = ""
     for msg in history:
         history_text += f"User: {msg['user']}\nAssistant: {msg['bot']}\n" if msg["bot"] else f"User: {msg['user']}\n"
@@ -152,9 +162,12 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:

 async def query_model(query: str, provider: str, history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]]], None]:
     provider = provider.lower()
+    response = ""  # Initialize response to avoid UnboundLocalError

+    logger.info(f"Processing query with provider: {provider}")
     if provider == "openai":
         async for chunk in ask_openai(query, history):
+            response += chunk
             yield chunk, history  # Yield partial response for streaming
     elif provider == "anthropic":
         response = await ask_anthropic(query, history)
@@ -164,6 +177,7 @@ async def query_model(query: str, provider: str, history: List[Dict[str, str]])
         yield response, history
     else:
         response = f"Error: Unknown provider: {provider}"
+        logger.error(response)
         yield response, history

     # Update history with the new query and response
@@ -177,22 +191,33 @@ async def submit_query(query: str, provider: str, history: List[Dict[str, str]])
         return

     response = ""
-
+    chatbot_messages = []
+    for msg in history:
+        chatbot_messages.append({"role": "user", "content": msg["user"]})
+        if msg["bot"]:
+            chatbot_messages.append({"role": "assistant", "content": msg["bot"]})
     async for response_chunk, updated_history in query_model(query, provider, history):
         response += response_chunk
-
-        # Convert history to chatbot messages format
+        # Update chatbot messages for streaming
         chatbot_messages = []
         for msg in updated_history:
             chatbot_messages.append({"role": "user", "content": msg["user"]})
             if msg["bot"]:
                 chatbot_messages.append({"role": "assistant", "content": msg["bot"]})
-
-        # Yield incremental updates for streaming
-        yield response_chunk, chatbot_messages, updated_history
-
-    updated_history.append({"user": query, "bot": response})
-    yield response, chatbot_messages, updated_history
+        if response and provider == "openai":
+            # For streaming, show partial response
+            if chatbot_messages and chatbot_messages[-1]["role"] == "user":
+                chatbot_messages.append({"role": "assistant", "content": response})
+            else:
+                chatbot_messages[-1] = {"role": "assistant", "content": response}
+        yield "", chatbot_messages, updated_history  # Yield to chatbot, not query
+    # Final yield with complete response
+    chatbot_messages = []
+    for msg in updated_history:
+        chatbot_messages.append({"role": "user", "content": msg["user"]})
+        if msg["bot"]:
+            chatbot_messages.append({"role": "assistant", "content": msg["bot"]})
+    yield "", chatbot_messages, updated_history

 # Gradio interface
 def clear_history():
 
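The fix revolves around how the Chat Completions API streams tokens: every Server-Sent Events line arrives as "data: {json}" with the text in choices[0].delta.content, followed by a final "data: [DONE]" sentinel. The sketch below shows that parsing pattern in isolation; it is a rough guide under assumptions, not the app's code: the stream_chat helper, the hard-coded endpoint URL, and the OPENAI_API_KEY environment variable are illustrative. It reads the stream with aiter_lines(), which yields whole SSE lines; aiter_text() chunks, as used in app.py, can split a JSON object across a chunk boundary, which is one way to hit exactly the parse errors this commit now logs.

import json
import os
from typing import AsyncGenerator

import httpx

OPENAI_URL = "https://api.openai.com/v1/chat/completions"  # standard endpoint, adjust as needed


async def stream_chat(query: str) -> AsyncGenerator[str, None]:
    # Hypothetical helper (not from app.py): stream one reply for a single user message.
    headers = {"Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}"}
    payload = {
        "model": "gpt-4o-mini",
        "messages": [{"role": "user", "content": query}],
        "stream": True,
    }
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("POST", OPENAI_URL, headers=headers, json=payload) as response:
            response.raise_for_status()
            # aiter_lines() hands back complete SSE lines, so a JSON payload is never
            # split across chunk boundaries the way raw text chunks can be.
            async for line in response.aiter_lines():
                if not line.startswith("data: "):
                    continue
                data = line[len("data: "):].strip()
                if not data or data == "[DONE]":
                    continue
                try:
                    delta = json.loads(data)["choices"][0].get("delta", {})
                except (json.JSONDecodeError, KeyError, IndexError):
                    continue  # skip keep-alives or malformed chunks instead of aborting
                if delta.get("content"):
                    yield delta["content"]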
 
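The new httpx.HTTPStatusError branch calls await e.response.aread() before formatting the error. That matters because client.stream(...) does not load the body eagerly: touching e.response.text on an unread streamed response raises httpx.ResponseNotRead, and the bytes can only be fetched while the response is still open. A minimal sketch of the pattern, with a placeholder URL and helper name rather than anything from app.py:

import httpx


async def post_and_collect(url: str, payload: dict) -> str:
    # Placeholder helper: POST a streaming request and return either the full body
    # or a readable error string that includes the server's error payload.
    async with httpx.AsyncClient() as client:
        async with client.stream("POST", url, json=payload) as response:
            try:
                response.raise_for_status()
            except httpx.HTTPStatusError as e:
                # The streamed body has not been read yet, so read it explicitly
                # while the response is still open; e.response.text would raise.
                body = await e.response.aread()
                return f"HTTP {e.response.status_code}: {body.decode('utf-8', 'replace')}"
            parts = [chunk async for chunk in response.aiter_text()]
            return "".join(parts)

Keeping the handler inside the async with block is the conservative variant: once the context manager closes an unread stream, httpx no longer allows the body to be read back.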
 
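submit_query streams to the UI by being an async generator whose yields map onto its Gradio outputs (the query box, the chatbot, and the history state): each partial yield repaints the chatbot with the growing assistant message, and the final yield commits the finished turn. A stripped-down sketch of that wiring, assuming a recent Gradio with Chatbot(type="messages"); the component layout and the fake_stream stand-in are illustrative, not the app's actual interface:

import asyncio
from typing import AsyncGenerator, Dict, List, Tuple

import gradio as gr


async def fake_stream(query: str) -> AsyncGenerator[str, None]:
    # Stand-in for ask_openai(): emit the reply a word at a time.
    for word in f"You said: {query}".split():
        await asyncio.sleep(0.05)
        yield word + " "


async def submit(
    query: str, history: List[Dict[str, str]]
) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]]], None]:
    shown = list(history) + [{"role": "user", "content": query}]
    reply = ""
    async for chunk in fake_stream(query):
        reply += chunk
        # Partial yield: clear the textbox, show the growing reply, keep old state.
        yield "", shown + [{"role": "assistant", "content": reply}], history
    # Final yield: write the completed turn into the history state.
    new_history = shown + [{"role": "assistant", "content": reply}]
    yield "", new_history, new_history


with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    state = gr.State([])
    box = gr.Textbox(label="Query")
    box.submit(submit, inputs=[box, state], outputs=[box, chatbot, state])

if __name__ == "__main__":
    demo.launch()

Yielding an empty string as the first output keeps the textbox cleared on every update, which appears to be what the commit's "Yield to chatbot, not query" comment is after.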