bibibi12345 committed on
Commit
476f903
·
verified ·
1 Parent(s): 3c9b1bd

Update app/main.py

Browse files
Files changed (1) hide show
  1. app/main.py +38 -1
app/main.py CHANGED
@@ -261,6 +261,36 @@ def init_vertex_ai():
261
  print(f"ERROR: Failed to initialize client with credentials from Credential Manager file ({credential_manager.credentials_dir}): {e}")
262
  else:
263
  print(f"INFO: No credentials loaded via Credential Manager.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
264
 
265
  # If none of the methods worked, this error is still useful
266
  # If we reach here, either no method worked, or a prior method already initialized the client
@@ -416,6 +446,11 @@ def create_gemini_prompt(messages: List[OpenAIMessage]) -> Union[types.Content,
416
 
417
  # Process all messages in their original order
418
  for idx, message in enumerate(messages):
 
 
 
 
 
419
  # Map OpenAI roles to Gemini roles
420
  role = message.role
421
 
@@ -449,7 +484,8 @@ def create_gemini_prompt(messages: List[OpenAIMessage]) -> Union[types.Content,
449
  for part in message.content:
450
  if isinstance(part, dict):
451
  if part.get('type') == 'text':
452
- parts.append(types.Part(text=part.get('text', '')))
 
453
  elif part.get('type') == 'image_url':
454
  image_url = part.get('image_url', {}).get('url', '')
455
  if image_url.startswith('data:'):
@@ -1083,6 +1119,7 @@ async def chat_completions(request: OpenAIRequest, api_key: str = Depends(get_ap
1083
  try:
1084
  for candidate_index in range(candidate_count):
1085
  print(f"Sending streaming request to Gemini API (Model: {model_name}, Prompt Format: {prompt_func.__name__})")
 
1086
  responses = await client_instance.aio.models.generate_content_stream( # Use client_instance
1087
  model=model_name,
1088
  contents=prompt,
 
261
  print(f"ERROR: Failed to initialize client with credentials from Credential Manager file ({credential_manager.credentials_dir}): {e}")
262
  else:
263
  print(f"INFO: No credentials loaded via Credential Manager.")
264
+
265
+ # Priority 3: Fall back to GOOGLE_APPLICATION_CREDENTIALS environment variable (file path)
266
+ file_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
267
+ if file_path:
268
+ print(f"INFO: Checking GOOGLE_APPLICATION_CREDENTIALS file path: {file_path}")
269
+ if os.path.exists(file_path):
270
+ try:
271
+ print(f"INFO: File exists, attempting to load credentials")
272
+ credentials = service_account.Credentials.from_service_account_file(
273
+ file_path,
274
+ scopes=['https://www.googleapis.com/auth/cloud-platform']
275
+ )
276
+ project_id = credentials.project_id
277
+ print(f"Successfully loaded credentials from file for project: {project_id}")
278
+
279
+ try:
280
+ # Initialize the global client ONLY if it hasn't been set yet
281
+ if client is None:
282
+ client = genai.Client(vertexai=True, credentials=credentials, project=project_id, location="us-central1")
283
+ print(f"INFO: Initialized fallback Vertex AI client using GOOGLE_APPLICATION_CREDENTIALS file path for project: {project_id}")
284
+ return True # Successfully initialized global client
285
+ else:
286
+ print(f"INFO: Fallback client already initialized. GOOGLE_APPLICATION_CREDENTIALS validated for project: {project_id}")
287
+ # If client was already set, we don't need to return True, just let it finish
288
+ except Exception as client_err:
289
+ print(f"ERROR: Failed to initialize client with credentials from GOOGLE_APPLICATION_CREDENTIALS file ({file_path}): {client_err}")
290
+ except Exception as e:
291
+ print(f"ERROR: Failed to load credentials from GOOGLE_APPLICATION_CREDENTIALS path ({file_path}): {e}") # Added context
292
+ else:
293
+ print(f"ERROR: GOOGLE_APPLICATION_CREDENTIALS file does not exist at path: {file_path}")
294
 
295
  # If none of the methods worked, this error is still useful
296
  # If we reach here, either no method worked, or a prior method already initialized the client
 
446
 
447
  # Process all messages in their original order
448
  for idx, message in enumerate(messages):
449
+ # Skip messages with empty content
450
+ if not message.content:
451
+ print(f"Skipping message {idx} due to empty content (Role: {message.role})")
452
+ continue
453
+
454
  # Map OpenAI roles to Gemini roles
455
  role = message.role
456
 
 
484
  for part in message.content:
485
  if isinstance(part, dict):
486
  if part.get('type') == 'text':
487
+ print("Empty message detected. Auto fill in.")
488
+ parts.append(types.Part(text=part.get('text', '\n')))
489
  elif part.get('type') == 'image_url':
490
  image_url = part.get('image_url', {}).get('url', '')
491
  if image_url.startswith('data:'):
 
1119
  try:
1120
  for candidate_index in range(candidate_count):
1121
  print(f"Sending streaming request to Gemini API (Model: {model_name}, Prompt Format: {prompt_func.__name__})")
1122
+ # print(prompt)
1123
  responses = await client_instance.aio.models.generate_content_stream( # Use client_instance
1124
  model=model_name,
1125
  contents=prompt,