# System-information demo app (Hugging Face Space).
# Contact us:
import datetime
import os
import platform
import re
import shutil
import subprocess
import sys

import gradio as gr
import spaces
@spaces.GPU(duration=120)
def check_python():
    """Return a one-line summary of the running interpreter.

    Appends a warning line when the interpreter is not CPython 3.9/3.10/3.11.
    """
    supported_minors = (9, 10, 11)
    python_info = f"Python {platform.python_version()} on {platform.system()}"
    major, minor, micro = sys.version_info[:3]
    if major != 3 or minor not in supported_minors:
        # Spell out the accepted versions instead of interpolating the raw
        # list (which previously rendered as "required 3.[9, 10, 11]").
        accepted = ", ".join(f"3.{m}" for m in supported_minors)
        python_info += (
            f"\nIncompatible Python version: {major}.{minor}.{micro} "
            f"required one of: {accepted}"
        )
    return python_info
def check_torch():
    """Detect the GPU compute stack and summarise the Torch installation.

    Returns a multi-line string naming the detected vendor toolkit
    (NVIDIA / AMD ROCm / Intel OneAPI / CPU-only), the installed Torch
    version, the active backend, and each device Torch can see.
    Returns an error string instead if torch cannot be imported.
    """
    torch_info = ""
    # Vendor-toolkit sniffing via well-known binaries/paths, checked in
    # priority order NVIDIA -> AMD -> Intel, falling back to CPU-only.
    if shutil.which('nvidia-smi') is not None or os.path.exists(
        os.path.join(
            # Windows fallback: nvidia-smi.exe lives under System32.
            os.environ.get('SystemRoot') or r'C:\Windows',
            'System32',
            'nvidia-smi.exe',
        )
    ):
        torch_info += 'NVIDIA toolkit detected\n'
    elif shutil.which('rocminfo') is not None or os.path.exists('/opt/rocm/bin/rocminfo'):
        torch_info += 'AMD toolkit detected\n'
    elif (shutil.which('sycl-ls') is not None
          or os.environ.get('ONEAPI_ROOT') is not None
          or os.path.exists('/opt/intel/oneapi')):
        torch_info += 'Intel OneAPI toolkit detected\n'
    else:
        torch_info += 'Using CPU-only Torch\n'
    try:
        import torch
        try:
            # Optional Intel XPU support: best-effort, silently skipped when
            # IPEX or the project-local init helper is not installed.
            import intel_extension_for_pytorch as ipex
            if torch.xpu.is_available():
                from library.ipex import ipex_init
                ipex_init()
                # Intel driver tuning knobs; only set when not already defined.
                os.environ.setdefault('NEOReadDebugKeys', '1')
                os.environ.setdefault('ClDeviceGlobalMemSizeAvailablePercent', '100')
        except Exception:
            pass
        torch_info += f'Torch {torch.__version__}\n'
        if not torch.cuda.is_available():
            torch_info += 'Torch reports CUDA not available\n'
        else:
            if torch.version.cuda:
                # NOTE(review): on IPEX builds torch.cuda maps to XPU, hence
                # the XPU check inside the CUDA branch -- confirm intent.
                if hasattr(torch, "xpu") and torch.xpu.is_available():
                    torch_info += f'Torch backend: Intel IPEX OneAPI {ipex.__version__}\n'
                else:
                    torch_info += f'Torch backend: NVIDIA CUDA {torch.version.cuda} cuDNN {torch.backends.cudnn.version() if torch.backends.cudnn.is_available() else "N/A"}\n'
            elif torch.version.hip:
                torch_info += f'Torch backend: AMD ROCm HIP {torch.version.hip}\n'
            else:
                torch_info += 'Unknown Torch backend\n'
            # One summary line per visible device (VRAM reported in MiB).
            for device in [torch.cuda.device(i) for i in range(torch.cuda.device_count())]:
                if hasattr(torch, "xpu") and torch.xpu.is_available():
                    torch_info += f'Torch detected GPU: {torch.xpu.get_device_name(device)} VRAM {round(torch.xpu.get_device_properties(device).total_memory / 1024 / 1024)} Compute Units {torch.xpu.get_device_properties(device).max_compute_units}\n'
                else:
                    torch_info += f'Torch detected GPU: {torch.cuda.get_device_name(device)} VRAM {round(torch.cuda.get_device_properties(device).total_memory / 1024 / 1024)} Arch {torch.cuda.get_device_capability(device)} Cores {torch.cuda.get_device_properties(device).multi_processor_count}\n'
        return torch_info
    except Exception as e:
        return f'Could not load torch: {e}'
def get_installed_packages():
    """Return installed packages as (name, version) tuples, sorted by name.

    Parses `pip freeze`. Pip WARNING lines and lines without an exact
    '==' pin (editable or direct-URL installs) are skipped.
    """
    pkgs_installed = subprocess.getoutput("pip freeze")
    packages = []
    for line in pkgs_installed.splitlines():
        if "WARNING" in line or '==' not in line:
            continue
        # partition() splits on the FIRST '==' only, so a stray second
        # '==' in the line no longer raises ValueError like split() did.
        pkg_name, _, pkg_version = line.partition('==')
        packages.append((pkg_name, pkg_version))
    # Case-insensitive sort matches pip's own listing order.
    packages.sort(key=lambda item: item[0].lower())
    return packages
def get_installed_packages_version():
    """List every exactly-pinned installed package as a (name, version) tuple.

    Output order is whatever `pip freeze` emits; WARNING lines and
    entries without '==' are ignored.
    """
    freeze_output = subprocess.getoutput("pip freeze")
    useful_lines = (ln for ln in freeze_output.splitlines() if "WARNING" not in ln)
    versions = []
    for entry in useful_lines:
        if '==' not in entry:
            continue
        name, version = entry.split('==')
        versions.append((name, version))
    return versions
def match_packages_with_versions():
    """Match requirements.txt entries against installed package versions.

    Reads requirements.txt next to this file and returns a list of
    (requirement_name, version) tuples, with version "Not installed"
    when the package is absent from `pip freeze`.

    Raises FileNotFoundError if requirements.txt does not exist.
    """
    requirements_file = os.path.join(os.path.dirname(__file__), "requirements.txt")
    with open(requirements_file, 'r') as f:
        raw_lines = f.read().splitlines()
    requirements = []
    for raw in raw_lines:
        req = raw.strip()
        if not req or req.startswith('#'):
            continue
        # Keep only the distribution name: cut at the first version
        # specifier (==, >=, <=, ~=, !=, <, >), extras bracket '[' or
        # environment-marker ';' -- the old code handled '==' pins only,
        # so 'pkg>=1.0' entries always reported "Not installed".
        name = re.split(r'[=<>!~;\[]', req, maxsplit=1)[0].strip()
        if name:
            requirements.append(name)
    installed_dict = dict(get_installed_packages_version())
    return [(req, installed_dict.get(req, "Not installed")) for req in requirements]
def display_info():
    """Assemble the full system report shown in the UI textbox."""
    timestamp = datetime.datetime.now().strftime('%m/%d/%Y-%H:%M:%S')
    # Each section is separated by a blank line in the final report.
    sections = (
        f"Machine local date and time: {timestamp}",
        check_python(),
        check_torch(),
        f"Installed packages:\n{get_installed_packages()}",
        f"Matched packages with versions:\n{match_packages_with_versions()}",
    )
    return "\n\n".join(sections)
# Minimal Gradio UI: one button that fills the textbox with the report.
# (A stray '|' page-scrape artifact after demo.launch() was removed --
# it made the file a SyntaxError.)
with gr.Blocks() as demo:
    output = gr.Textbox(lines=20, label="System Information")
    btn = gr.Button("Check System")
    btn.click(display_info, inputs=[], outputs=[output])
demo.launch()