# MoraxCheng's picture
# Add keep-alive scripts and environment configuration for Tranception Space
# 7a6c881
#!/usr/bin/env python3
"""
Health check script for Tranception app on Hugging Face Spaces
"""
import os
import sys
import torch
def check_environment():
    """Print a diagnostic report of the runtime environment.

    Reports Python/PyTorch/CUDA versions, the environment variables the
    app reads (``DISABLE_ZERO_GPU``, ``SPACE_ID``), whether the process
    is running on Hugging Face Spaces (and, if so, whether the Zero GPU
    decorator from the ``spaces`` module can be constructed), and lists
    the Tranception checkpoints expected on the Hugging Face Hub.

    Returns:
        None. All output goes to stdout.
    """
    print("=== Tranception Health Check ===")

    # Interpreter and framework versions.
    print(f"Python version: {sys.version}")
    print(f"PyTorch version: {torch.__version__}")
    print(f"CUDA available: {torch.cuda.is_available()}")
    if torch.cuda.is_available():
        print(f"CUDA version: {torch.version.cuda}")
        print(f"GPU: {torch.cuda.get_device_name(0)}")

    # Environment variables that control app behavior on Spaces.
    print("\nEnvironment variables:")
    print(f"DISABLE_ZERO_GPU: {os.environ.get('DISABLE_ZERO_GPU', 'not set')}")
    print(f"SPACE_ID: {os.environ.get('SPACE_ID', 'not set')}")

    # SPACE_ID is only set when running on Hugging Face Spaces.
    if os.environ.get('SPACE_ID'):
        print("\nRunning on Hugging Face Spaces")
        # The `spaces` module is only installed on Spaces hardware, so
        # import lazily and report rather than crash if it is missing.
        try:
            import spaces
            print("✓ spaces module available")
            # Constructing the decorator is enough to validate Zero GPU
            # support; the returned object itself is not needed.
            try:
                spaces.GPU()
                print("✓ Zero GPU decorator can be created")
            except Exception as e:
                print(f"✗ Zero GPU decorator error: {e}")
        except ImportError:
            print("✗ spaces module not available")
    else:
        print("\nNot running on Hugging Face Spaces")

    # Checkpoints are pulled from the Hub at runtime, so there is nothing
    # local to verify — just list the repos the app expects.
    print("\nChecking model availability on Hugging Face Hub:")
    models = ["Tranception_Small", "Tranception_Medium", "Tranception_Large"]
    for model in models:
        print(f"- PascalNotin/{model}: Available on HF Hub")

    print("\n=== Health check complete ===")
# Run the health check when executed directly as a script.
if __name__ == "__main__":
    check_environment()