# FhirFlame Environment Configuration
# =============================================================================
# API Keys (Optional - the app works without them)
# =============================================================================

# Mistral API Configuration
MISTRAL_API_KEY=your_mistral_api_key_here

# HuggingFace Configuration
HF_TOKEN=your_huggingface_token_here

# Modal Labs Configuration
MODAL_TOKEN_ID=your_modal_token_id_here
MODAL_TOKEN_SECRET=your_modal_token_secret_here
MODAL_ENDPOINT_URL=https://your-modal-app.modal.run

# Ollama Configuration
OLLAMA_BASE_URL=http://localhost:11434
OLLAMA_MODEL=codellama:13b-instruct
USE_REAL_OLLAMA=true

# =============================================================================
# Modal Labs GPU Pricing (USD per hour)
# Based on Modal's official pricing as of 2024
# =============================================================================

# GPU Hourly Rates
MODAL_A100_HOURLY_RATE=1.32
MODAL_T4_HOURLY_RATE=0.51
MODAL_L4_HOURLY_RATE=0.73
MODAL_CPU_HOURLY_RATE=0.048

# Modal Platform Fee (percentage markup)
MODAL_PLATFORM_FEE=15

# GPU Performance Estimates (characters per second)
MODAL_A100_CHARS_PER_SEC=2000
MODAL_T4_CHARS_PER_SEC=1200
MODAL_L4_CHARS_PER_SEC=800

# =============================================================================
# Cloud Provider Pricing
# =============================================================================

# HuggingFace Inference API (USD per 1K tokens)
HF_COST_PER_1K_TOKENS=0.06

# Ollama Local (free)
OLLAMA_COST_PER_REQUEST=0.0

# =============================================================================
# Processing Configuration
# =============================================================================

# Provider selection thresholds
AUTO_SELECT_MODAL_THRESHOLD=1500
AUTO_SELECT_BATCH_THRESHOLD=5

# Demo and Development
DEMO_MODE=false
USE_COST_OPTIMIZATION=true

# =============================================================================
# Monitoring and Observability (Optional)
# =============================================================================

# Langfuse Configuration
LANGFUSE_SECRET_KEY=your_langfuse_secret_key
LANGFUSE_PUBLIC_KEY=your_langfuse_public_key
LANGFUSE_HOST=https://cloud.langfuse.com

# =============================================================================
# Medical AI Configuration
# =============================================================================

# FHIR Validation
FHIR_VALIDATION_LEVEL=standard
ENABLE_FHIR_R4=true
ENABLE_FHIR_R5=true

# Medical Entity Extraction
EXTRACT_PATIENT_INFO=true
EXTRACT_CONDITIONS=true
EXTRACT_MEDICATIONS=true
EXTRACT_VITALS=true
EXTRACT_PROCEDURES=true
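
# =============================================================================
# Note: verifying the Ollama settings (illustrative, not read by the app)
# =============================================================================
# OLLAMA_BASE_URL above points at Ollama's default local port (11434).
# The commands below are standard Ollama CLI/API calls, not FhirFlame-specific;
# they are a quick sanity check that the server is reachable and the model in
# OLLAMA_MODEL has been pulled:
#
#   ollama pull codellama:13b-instruct
#   curl http://localhost:11434/api/tags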
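
# =============================================================================
# Note: how the Modal pricing values combine (illustrative only)
# =============================================================================
# This is a rough sketch of a cost estimate built from the rates, throughput,
# and platform fee defined above; it is an assumption for orientation, and the
# application's actual cost formula may differ. Example for a 10,000-character
# document on an A100:
#
#   processing time  ~ 10000 / MODAL_A100_CHARS_PER_SEC          = 5 s
#   compute cost     ~ (5 / 3600) * MODAL_A100_HOURLY_RATE       ~ $0.00183
#   with platform fee: $0.00183 * (1 + MODAL_PLATFORM_FEE / 100) ~ $0.0021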