Update app/main.py
app/main.py (+1 -2)

@@ -790,7 +790,7 @@ def create_encrypted_full_gemini_prompt(messages: List[OpenAIMessage]) -> Union[
             processed_messages.append(OpenAIMessage(role="user", content=OBFUSCATION_PROMPT))
             print("INFO: Obfuscation prompt added as the first message (edge case).")
         # If there are messages but none are user/system, the prompt is not added (according to original logic interpretation)
-
+    return create_encrypted_gemini_prompt(processed_messages)
 
 
 
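For context, this hunk replaces a blank line at the end of the edge-case handling in create_encrypted_full_gemini_prompt with an explicit delegation to create_encrypted_gemini_prompt. The sketch below is a hypothetical reconstruction of how that tail might fit together: only the three context lines and the added return come from the diff, while the surrounding conditionals, the OBFUSCATION_PROMPT value, the OpenAIMessage shape, and the create_encrypted_gemini_prompt stub are assumptions for illustration.

```python
# Hypothetical sketch, not the actual app/main.py: only the lines that appear
# in the hunk above are from the source; every other name and branch is assumed.
from dataclasses import dataclass
from typing import List

OBFUSCATION_PROMPT = "..."  # placeholder; the real prompt text is not in the diff


@dataclass
class OpenAIMessage:
    role: str
    content: str


def create_encrypted_gemini_prompt(messages: List[OpenAIMessage]):
    # Stand-in for the builder the new return statement delegates to.
    return [(m.role, m.content) for m in messages]


def create_encrypted_full_gemini_prompt(messages: List[OpenAIMessage]):
    processed_messages = list(messages)  # assumed preprocessing
    if not any(m.role in ("user", "system") for m in processed_messages):
        if not processed_messages:
            processed_messages.append(OpenAIMessage(role="user", content=OBFUSCATION_PROMPT))
            print("INFO: Obfuscation prompt added as the first message (edge case).")
        # If there are messages but none are user/system, the prompt is not added (according to original logic interpretation)
    # Line added by this commit: always fall through to the plain encrypted-prompt builder.
    return create_encrypted_gemini_prompt(processed_messages)
```
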
@@ -1231,7 +1231,6 @@ async def chat_completions(request: OpenAIRequest, api_key: str = Depends(get_ap
     # --- Helper function to make the API call (handles stream/non-stream) ---
     async def make_gemini_call(client_instance, model_name, prompt_func, current_gen_config): # Add client_instance parameter
         prompt = prompt_func(request.messages)
-        print(prompt)
 
         # Log prompt structure
         if isinstance(prompt, list):
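The second hunk removes a raw print(prompt) from the make_gemini_call helper, presumably so the full prompt contents no longer land in the logs, while the surviving comment indicates the helper still reports the prompt's structure. The following is a hypothetical sketch of structure-only logging in that spirit; the real code after the isinstance check is not shown in the diff.

```python
# Hypothetical structure-only logging, standing in for the removed print(prompt).
# The actual body that follows "if isinstance(prompt, list):" is not part of this diff.
def log_prompt_structure(prompt) -> None:
    if isinstance(prompt, list):
        part_types = [type(item).__name__ for item in prompt]
        print(f"DEBUG: prompt is a list of {len(prompt)} item(s): {part_types}")
    else:
        print(f"DEBUG: prompt is a single {type(prompt).__name__} object")
```

Logging only counts and types keeps the diagnostic value of the old debug line without echoing user messages or the obfuscation prompt into server logs.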