Spaces:
Running
Running
File size: 1,885 Bytes
65703d9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 |
#!/usr/bin/env python3
"""
Debug test to understand why Gemini responses aren't being cleaned
"""
import asyncio
import sys
import os
# Add src to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
from langchain_google_genai import ChatGoogleGenerativeAI
from src.utils.config import config
async def test_gemini_response_structure():
    """Probe the structure of a Gemini chat response to debug response cleaning.

    Prints the response's type, public attributes, and (if present) its
    ``content`` attribute so a maintainer can see exactly what shape the
    model returns.

    Returns:
        bool: True if the probe ran to completion, False when no API key is
        configured or any exception was raised during the call.
    """
    # Guard clause: without credentials there is nothing to test.
    if not config.GEMINI_API_KEY:
        print("β No Gemini API key available")
        return False
    try:
        print("π§ͺ Testing Gemini response structure...")
        # Low temperature keeps the probe output near-deterministic.
        llm = ChatGoogleGenerativeAI(
            model="gemini-2.0-flash-lite",
            google_api_key=config.GEMINI_API_KEY,
            temperature=0.1,
        )
        # Simple query — we only care about the response object's shape.
        response = await llm.ainvoke("What is 2+2?")
        print(f"π Response type: {type(response)}")
        print(f"π Response dir: {[attr for attr in dir(response) if not attr.startswith('_')]}")
        if hasattr(response, 'content'):
            # BUG FIX: this print was split across two physical lines in the
            # middle of its string literal (a SyntaxError); re-joined here.
            print(f"β Response has 'content' attribute")
            print(f"π Content: {response.content}")
            print(f"π Content type: {type(response.content)}")
        else:
            print("β Response does NOT have 'content' attribute")
            print(f"π Full response: {str(response)}")
        return True
    except Exception as e:
        # Best-effort debug script: report and signal failure rather than crash.
        print(f"β Test failed: {e}")
        return False
async def main():
    """Run the response-structure probe and translate its result to an exit code.

    Returns:
        int: 0 on success, 1 on failure (suitable for ``sys.exit``).
    """
    # Guard-style early return: success path first.
    if await test_gemini_response_structure():
        print("\nπ Test completed!")
        return 0
    print("\nβ Test failed!")
    return 1
# Script entry point: drive the async main() on a fresh event loop and
# propagate its integer result to the shell as the process exit status.
if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)
|