Commit c204ed5
Parent(s): d9d3a0e
bug fix

Files changed: app/api_helpers.py (+42 -6)

app/api_helpers.py (CHANGED)
@@ -248,13 +248,28 @@ def create_generation_config(request: OpenAIRequest) -> Dict[str, Any]:
 
 def is_gemini_response_valid(response: Any) -> bool:
     if response is None: return False
-
+
+    # Check for direct text attribute (SDK response)
+    if hasattr(response, 'text') and isinstance(response.text, str) and response.text.strip():
+        return True
+
+    # Check for candidates (both SDK and DirectVertexClient responses)
     if hasattr(response, 'candidates') and response.candidates:
         for candidate in response.candidates:
-
+            # Check for direct text on candidate
+            if hasattr(candidate, 'text') and isinstance(candidate.text, str) and candidate.text.strip():
+                return True
+
+            # Check for content with parts
             if hasattr(candidate, 'content') and hasattr(candidate.content, 'parts') and candidate.content.parts:
-                for part_item in candidate.content.parts:
-                    if
+                for part_item in candidate.content.parts:
+                    # Check if part has text (handle both SDK and AttrDict)
+                    if hasattr(part_item, 'text'):
+                        # AttrDict might have empty string instead of None
+                        part_text = getattr(part_item, 'text', None)
+                        if part_text is not None and isinstance(part_text, str) and part_text.strip():
+                            return True
+
     return False
 
 async def _base_fake_stream_engine(
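For context, a minimal sketch of the shapes the broadened validator now accepts. The SimpleNamespace objects are hypothetical stand-ins that only mimic the attributes is_gemini_response_valid inspects, and the import assumes the repository root is on the import path with its dependencies installed:

# Sketch (not part of the commit): exercise is_gemini_response_valid with
# stand-in objects that only mimic the attribute shapes it checks.
from types import SimpleNamespace

from app.api_helpers import is_gemini_response_valid

# SDK-style response exposing a direct .text attribute
sdk_like = SimpleNamespace(text="Hello from Gemini")

# Candidate/part-style response (e.g. a DirectVertexClient-shaped result)
part = SimpleNamespace(text="Hello from Gemini")
candidate = SimpleNamespace(content=SimpleNamespace(parts=[part]))
nested_like = SimpleNamespace(text=None, candidates=[candidate])  # text=None exercises the isinstance guard

# A response whose only part carries an empty string is still rejected
empty_part = SimpleNamespace(text="")
empty_candidate = SimpleNamespace(content=SimpleNamespace(parts=[empty_part]))
empty_like = SimpleNamespace(text=None, candidates=[empty_candidate])

assert is_gemini_response_valid(sdk_like) is True
assert is_gemini_response_valid(nested_like) is True
assert is_gemini_response_valid(empty_like) is False
assert is_gemini_response_valid(None) is False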
@@ -581,6 +596,27 @@ async def execute_gemini_call(
             block_msg+=f" ({response_obj_call.prompt_feedback.block_reason_message})"
         raise ValueError(block_msg)
 
-    if not is_gemini_response_valid(response_obj_call):
-
+    if not is_gemini_response_valid(response_obj_call):
+        # Create a more informative error message
+        error_details = f"Invalid non-streaming Gemini response for model string '{model_to_call}'. "
+
+        # Try to extract useful information from the response
+        if hasattr(response_obj_call, 'candidates'):
+            error_details += f"Candidates: {len(response_obj_call.candidates) if response_obj_call.candidates else 0}. "
+            if response_obj_call.candidates and len(response_obj_call.candidates) > 0:
+                candidate = response_obj_call.candidates[0]
+                if hasattr(candidate, 'content'):
+                    error_details += "Has content. "
+                    if hasattr(candidate.content, 'parts'):
+                        error_details += f"Parts: {len(candidate.content.parts) if candidate.content.parts else 0}. "
+                        if candidate.content.parts and len(candidate.content.parts) > 0:
+                            part = candidate.content.parts[0]
+                            if hasattr(part, 'text'):
+                                text_preview = str(getattr(part, 'text', ''))[:100]
+                                error_details += f"First part text: '{text_preview}'"
+        else:
+            # If it's not the expected structure, show the type
+            error_details += f"Response type: {type(response_obj_call).__name__}"
+
+        raise ValueError(error_details)
     return JSONResponse(content=convert_to_openai_format(response_obj_call, request_obj.model))
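As a rough illustration of what the new failure branch reports, the sketch below applies the same message-building pattern in isolation to a hypothetical response whose only part carries an empty string; the model string and SimpleNamespace stand-ins are assumptions for illustration only, not objects from the repository:

# Standalone sketch of the diagnostic text the new branch produces for an
# "empty" response; the stand-in objects only mirror the attributes the
# error path inspects.
from types import SimpleNamespace

model_to_call = "gemini-2.0-flash"  # placeholder model string

empty_response = SimpleNamespace(
    candidates=[
        SimpleNamespace(content=SimpleNamespace(parts=[SimpleNamespace(text="")]))
    ]
)

error_details = f"Invalid non-streaming Gemini response for model string '{model_to_call}'. "
if hasattr(empty_response, 'candidates'):
    error_details += f"Candidates: {len(empty_response.candidates) if empty_response.candidates else 0}. "
    if empty_response.candidates:
        candidate = empty_response.candidates[0]
        if hasattr(candidate, 'content') and hasattr(candidate.content, 'parts'):
            error_details += f"Has content. Parts: {len(candidate.content.parts)}. "
            if candidate.content.parts and hasattr(candidate.content.parts[0], 'text'):
                error_details += f"First part text: '{str(candidate.content.parts[0].text)[:100]}'"
else:
    error_details += f"Response type: {type(empty_response).__name__}"

print(error_details)
# -> Invalid non-streaming Gemini response for model string 'gemini-2.0-flash'.
#    Candidates: 1. Has content. Parts: 1. First part text: ''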