#!/usr/bin/env python3
"""
Test script to verify Ollama connectivity fixes
"""
import sys
import asyncio

sys.path.append('.')

from src.enhanced_codellama_processor import EnhancedCodeLlamaProcessor


async def test_ollama_fix():
    print("🔄 Testing Enhanced CodeLlama Processor with Ollama fixes...")

    # Initialize processor
    processor = EnhancedCodeLlamaProcessor()

    # Test simple medical text
    test_text = "Patient has diabetes and hypertension. Blood pressure is 140/90."

    print(f"📝 Testing text: {test_text}")
    print("🔄 Processing...")

    try:
        result = await processor.process_document(
            medical_text=test_text,
            document_type="clinical_note",
            extract_entities=True,
            generate_fhir=False,
        )

        print("✅ Processing successful!")
        print(f"📋 Provider used: {result.get('provider_metadata', {}).get('provider_used', 'Unknown')}")
        print(f"⏱️ Processing time: {result.get('provider_metadata', {}).get('processing_time', 'Unknown')}")
        print(f"🔍 Entities found: {result.get('extraction_results', {}).get('entities_found', 0)}")

        if result.get('extracted_data'):
            print("📊 Sample extracted data available")

        return True

    except Exception as e:
        print(f"❌ Processing failed: {e}")
        return False


if __name__ == "__main__":
    success = asyncio.run(test_ollama_fix())
    if success:
        print("\n🎉 Ollama connectivity fixes are working!")
        sys.exit(0)
    else:
        print("\n❌ Issues still exist")
        sys.exit(1)