Update controllers/admin_controller.py

controllers/admin_controller.py   CHANGED   +103 -70
@@ -3,78 +3,111 @@ from core import service_config, llm_models
 from llm_model import LLMModel
 from intent_utils import background_training
 from log import log
-import json, os, shutil
+import json, os, shutil, threading
 
 router = APIRouter()
 
 @router.post("/reload_config")
 async def reload_config(request: Request):
-    [old lines 12-80: previous reload_config body, removed; its content is not shown in the rendered diff]
+    body = await request.json()
+    project_name = body.get("project_name")
+    new_config_data = body.get("service_config")
+
+    if not project_name or not new_config_data:
+        return {"error": "project_name and service_config are required."}
+
+    def background_reload():
+        try:
+            current_project = service_config.projects.get(project_name)
+            incoming_project = new_config_data.get("projects", {}).get(project_name)
+
+            if not incoming_project:
+                log(f"❌ '{project_name}' was not found in the new config, operation stopped.")
+                return
+
+            project_path = f"/data/projects/{project_name}"
+            temp_path = f"/data/projects/{project_name}_temp"
+
+            if os.path.exists(temp_path):
+                shutil.rmtree(temp_path)
+            os.makedirs(temp_path, exist_ok=True)
+
+            llm_config = incoming_project["llm"]
+            intents = incoming_project["intents"]
+
+            temp_instance = LLMModel()
+
+            # 🆕 A new project is being added
+            if current_project is None:
+                log(f"🆕 New project '{project_name}' detected, starting load...")
+
+                temp_instance.setup(service_config, llm_config, temp_path)
+                intent_model_path = os.path.join(temp_path, "intent", "trained_model")
+                background_training(
+                    project_name,
+                    intents,
+                    llm_config["intent_model_id"],
+                    intent_model_path,
+                    llm_config["train_confidence_treshold"]
+                )
+                temp_instance.load_intent_model(intent_model_path)
+
+                if os.path.exists(project_path):
+                    shutil.rmtree(project_path)
+                shutil.copytree(temp_path, project_path)
+
+                llm_models[project_name] = temp_instance
+                service_config.projects[project_name] = incoming_project
+
+                log(f"✅ New project '{project_name}' loaded successfully and placed in memory.")
+                return
+
+            # 🔄 Update the existing project if anything changed
+            if current_project == incoming_project:
+                log(f"ℹ️ No changes found for '{project_name}', operation skipped.")
+                return
+
+            log(f"🔄 Update detected for '{project_name}', starting update...")
+
+            # Reload the base model if it changed
+            if current_project["llm"]["model_base"] != llm_config["model_base"]:
+                temp_instance.setup(service_config, llm_config, temp_path)
+            else:
+                temp_instance.model = llm_models[project_name].model
+                temp_instance.tokenizer = llm_models[project_name].tokenizer
+
+            # Retrain if the intents changed
+            if current_project["intents"] != intents:
+                intent_model_path = os.path.join(temp_path, "intent", "trained_model")
+                background_training(
+                    project_name,
+                    intents,
+                    llm_config["intent_model_id"],
+                    intent_model_path,
+                    llm_config["train_confidence_treshold"]
+                )
+                temp_instance.load_intent_model(intent_model_path)
+            else:
+                temp_instance.intent_model = llm_models[project_name].intent_model
+                temp_instance.intent_tokenizer = llm_models[project_name].intent_tokenizer
+                temp_instance.intent_label2id = llm_models[project_name].intent_label2id
+
+            if os.path.exists(project_path):
+                shutil.rmtree(project_path)
+            shutil.copytree(temp_path, project_path)
+
+            llm_models[project_name] = temp_instance
+            service_config.projects[project_name] = incoming_project
+
+            log(f"✅ Update for '{project_name}' completed and placed in memory.")
+
+        except Exception as e:
+            log(f"❌ reload_config background error: {e}")
+
+    # Start in the background
+    threading.Thread(target=background_reload, daemon=True).start()
+
+    return {
+        "status": "accepted",
+        "message": f"The update for '{project_name}' has been started in the background. The operation can be tracked in the logs."
+    }
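
For reference, a minimal sketch of how a client could exercise the new endpoint. The host, port, and project name are assumptions for illustration, and the payload only fills in the keys the handler actually reads (project_name, and a service_config whose projects entry contains that project); a real config will carry more fields.

# Hypothetical client call, assuming the service listens on localhost:8000 and the
# router is mounted without a prefix; adjust the URL to match your deployment.
import requests

payload = {
    "project_name": "demo",                                   # hypothetical project name
    "service_config": {
        "projects": {
            "demo": {
                "llm": {
                    "model_base": "some-base-model",          # compared to decide whether the base model is reloaded
                    "intent_model_id": "some-intent-model",   # passed to background_training
                    "train_confidence_treshold": 0.7,         # key name kept as spelled in the controller
                },
                "intents": [],                                # intent definitions; retraining runs when these change
            }
        }
    },
}

resp = requests.post("http://localhost:8000/reload_config", json=payload)
print(resp.json())  # {"status": "accepted", "message": "..."} while the reload continues in the background

The endpoint returns immediately with "status": "accepted"; the actual reload, copy, and optional retraining happen in the daemon thread, so progress has to be followed through the log output.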