Ganesh Chintalapati committed on
Commit
db34497
·
1 Parent(s): 2cee09f

OpenAI, Anthropic, and Gemini work

Browse files
Files changed (1) hide show
  1. app.py +31 -6
app.py CHANGED
@@ -157,7 +157,8 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
157
  }
158
 
159
  try:
160
- async with httpx.AsyncClient() as client:
 
161
  response = await client.post(
162
  f"https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key={gemini_api_key}",
163
  headers=headers,
@@ -165,12 +166,36 @@ async def ask_gemini(query: str, history: List[Dict[str, str]]) -> str:
165
  )
166
 
167
  response.raise_for_status()
168
- return response.json()['candidates'][0]['content']['parts'][0]['text']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
169
  except httpx.HTTPStatusError as e:
170
- logger.error(f"Gemini HTTP Status Error: {e.response.status_code}, {e.response.text}")
171
- return f"Error: Gemini HTTP Status Error: {e.response.status_code}, {e.response.text}"
 
172
  except Exception as e:
173
- logger.error(f"Gemini Error: {str(e)}")
174
  return f"Error: Gemini Error: {str(e)}"
175
 
176
  async def query_model(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]]], None]:
@@ -203,7 +228,7 @@ async def query_model(query: str, providers: List[str], history: List[Dict[str,
203
  # Handle Gemini (non-streaming)
204
  if "Gemini" in providers:
205
  response = await ask_gemini(query, history)
206
- if response.strip():
207
  responses.append(f"[Gemini]: {response}")
208
 
209
  # Combine responses
 
157
  }
158
 
159
  try:
160
+ async with httpx.AsyncClient(timeout=30.0) as client:
161
+ logger.info(f"Sending Gemini request: {payload}")
162
  response = await client.post(
163
  f"https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key={gemini_api_key}",
164
  headers=headers,
 
166
  )
167
 
168
  response.raise_for_status()
169
+ response_json = response.json()
170
+ logger.info(f"Gemini response: {response_json}")
171
+
172
+ # Validate response structure
173
+ if not isinstance(response_json, dict) or "candidates" not in response_json or not response_json["candidates"]:
174
+ logger.error(f"Invalid Gemini response structure: {response_json}")
175
+ return f"Error: Invalid Gemini response structure"
176
+
177
+ candidates = response_json["candidates"]
178
+ if not isinstance(candidates, list) or not candidates or "content" not in candidates[0]:
179
+ logger.error(f"Invalid Gemini candidates format: {candidates}")
180
+ return f"Error: Invalid Gemini candidates format"
181
+
182
+ content = candidates[0]["content"]
183
+ if not isinstance(content, dict) or "parts" not in content or not content["parts"]:
184
+ logger.error(f"Invalid Gemini content format: {content}")
185
+ return f"Error: Invalid Gemini content format"
186
+
187
+ parts = content["parts"]
188
+ if not isinstance(parts, list) or not parts or "text" not in parts[0]:
189
+ logger.error(f"Invalid Gemini parts format: {parts}")
190
+ return f"Error: Invalid Gemini parts format"
191
+
192
+ return parts[0]["text"]
193
  except httpx.HTTPStatusError as e:
194
+ response_text = await e.response.aread()
195
+ logger.error(f"Gemini HTTP Status Error: {e.response.status_code}, {response_text.decode('utf-8')}")
196
+ return f"Error: Gemini HTTP Status Error: {e.response.status_code}, {response_text.decode('utf-8')}"
197
  except Exception as e:
198
+ logger.error(f"Gemini Error: {str(e)}\nStack trace: {traceback.format_exc()}")
199
  return f"Error: Gemini Error: {str(e)}"
200
 
201
  async def query_model(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]]], None]:
 
228
  # Handle Gemini (non-streaming)
229
  if "Gemini" in providers:
230
  response = await ask_gemini(query, history)
231
+ if response.strip() and not response.startswith("Error:"):
232
  responses.append(f"[Gemini]: {response}")
233
 
234
  # Combine responses