Update inference.py
inference.py: CHANGED (+17 -0)
@@ -6,6 +6,8 @@ from evo_model import EvoTransformerV22
 from search_utils import web_search
 import openai
 import time
+import psutil
+import platform
 
 # 🔐 Load OpenAI API Key securely
 openai.api_key = os.getenv("OPENAI_API_KEY")

@@ -92,3 +94,18 @@ def get_model_config():
         "ffn_dim": 1024,
         "memory_enabled": True
     }
+
+def get_system_stats():
+    gpu_info = torch.cuda.get_device_properties(0) if torch.cuda.is_available() else None
+    memory = psutil.virtual_memory()
+
+    return {
+        "device": "GPU" if torch.cuda.is_available() else "CPU",
+        "cpu_usage_percent": psutil.cpu_percent(),
+        "memory_used_gb": round(memory.used / (1024 ** 3), 2),
+        "memory_total_gb": round(memory.total / (1024 ** 3), 2),
+        "gpu_name": gpu_info.name if gpu_info else "N/A",
+        "gpu_memory_total_gb": round(gpu_info.total_memory / (1024 ** 3), 2) if gpu_info else "N/A",
+        "gpu_memory_used_gb": round(torch.cuda.memory_allocated() / (1024 ** 3), 2) if gpu_info else "N/A",
+        "platform": platform.platform()
+    }
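For reference, a minimal usage sketch of the new helper, assuming inference.py is importable and that torch, psutil, and platform are installed in the Space's environment (torch and os are expected to be imported in the unchanged lines above the first hunk):

# Hypothetical caller script, not part of the commit
from inference import get_system_stats

stats = get_system_stats()
# Keys below come from the dict returned by get_system_stats()
print(f"Device: {stats['device']} ({stats['gpu_name']})")
print(f"CPU usage: {stats['cpu_usage_percent']}%")
print(f"RAM: {stats['memory_used_gb']} / {stats['memory_total_gb']} GB")

Note that psutil.cpu_percent() returns 0.0 on its first call in a process unless given an interval, and torch.cuda.memory_allocated() only counts memory allocated by this PyTorch process, not total GPU usage.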