ciyidogan committed · verified
Commit 1fd319a · 1 Parent(s): cb61e8e

Update controllers/admin_controller.py

Files changed (1)
  1. controllers/admin_controller.py +81 -113
controllers/admin_controller.py CHANGED
@@ -1,113 +1,81 @@
- from fastapi import APIRouter, Request
- from core import service_config, llm_models
- from llm_model import LLMModel
- from intent_utils import background_training
- from log import log
- import json, os, shutil, threading
-
- router = APIRouter()
-
- @router.post("/reload_config")
- async def reload_config(request: Request):
-     body = await request.json()
-     project_name = body.get("project_name")
-     new_config_data = body.get("service_config")
-
-     if not project_name or not new_config_data:
-         return {"error": "project_name ve service_config gereklidir."}
-
-     def background_reload():
-         try:
-             current_project = service_config.projects.get(project_name)
-             incoming_project = new_config_data.get("projects", {}).get(project_name)
-
-             if not incoming_project:
-                 log(f"❌ '{project_name}' yeni config içinde bulunamadı, işlem durduruldu.")
-                 return
-
-             project_path = f"/data/projects/{project_name}"
-             temp_path = f"/data/projects/{project_name}_temp"
-
-             if os.path.exists(temp_path):
-                 shutil.rmtree(temp_path)
-             os.makedirs(temp_path, exist_ok=True)
-
-             llm_config = incoming_project["llm"]
-             intents = incoming_project["intents"]
-
-             temp_instance = LLMModel()
-
-             # 🆕 Yeni proje ekleniyor
-             if current_project is None:
-                 log(f"🆕 Yeni proje '{project_name}' tespit edildi, yükleme başlatılıyor...")
-
-                 temp_instance.setup(service_config, llm_config, temp_path)
-                 intent_model_path = os.path.join(temp_path, "intent", "trained_model")
-                 background_training(
-                     project_name,
-                     intents,
-                     llm_config["intent_model_id"],
-                     intent_model_path,
-                     llm_config["train_confidence_treshold"]
-                 )
-                 temp_instance.load_intent_model(intent_model_path)
-
-                 if os.path.exists(project_path):
-                     shutil.rmtree(project_path)
-                 shutil.copytree(temp_path, project_path)
-
-                 llm_models[project_name] = temp_instance
-                 service_config.projects[project_name] = incoming_project
-
-                 log(f"✅ Yeni proje '{project_name}' başarıyla yüklendi ve belleğe alındı.")
-                 return
-
-             # 🔄 Var olan projede değişiklik varsa güncelle
-             if current_project == incoming_project:
-                 log(f"ℹ️ '{project_name}' için değişiklik bulunamadı, işlem atlandı.")
-                 return
-
-             log(f"🔄 '{project_name}' güncellemesi tespit edildi, güncelleme başlatılıyor...")
-
-             # Ana model değiştiyse yükle
-             if current_project["llm"]["model_base"] != llm_config["model_base"]:
-                 temp_instance.setup(service_config, llm_config, temp_path)
-             else:
-                 temp_instance.model = llm_models[project_name].model
-                 temp_instance.tokenizer = llm_models[project_name].tokenizer
-
-             # Intent değiştiyse yeniden eğit
-             if current_project["intents"] != intents:
-                 intent_model_path = os.path.join(temp_path, "intent", "trained_model")
-                 background_training(
-                     project_name,
-                     intents,
-                     llm_config["intent_model_id"],
-                     intent_model_path,
-                     llm_config["train_confidence_treshold"]
-                 )
-                 temp_instance.load_intent_model(intent_model_path)
-             else:
-                 temp_instance.intent_model = llm_models[project_name].intent_model
-                 temp_instance.intent_tokenizer = llm_models[project_name].intent_tokenizer
-                 temp_instance.intent_label2id = llm_models[project_name].intent_label2id
-
-             if os.path.exists(project_path):
-                 shutil.rmtree(project_path)
-             shutil.copytree(temp_path, project_path)
-
-             llm_models[project_name] = temp_instance
-             service_config.projects[project_name] = incoming_project
-
-             log(f"✅ '{project_name}' güncellemesi tamamlandı ve belleğe alındı.")
-
-         except Exception as e:
-             log(f"❌ reload_config background hatası: {e}")
-
-     # Arka planda başlat
-     threading.Thread(target=background_reload, daemon=True).start()
-
-     return {
-         "status": "accepted",
-         "message": f"'{project_name}' için güncelleme arka planda başlatıldı. İşlem loglardan takip edilebilir."
-     }
 
+ from fastapi import APIRouter, Request
+ from core import service_config, llm_models
+ from llm_model import LLMModel
+ from log import log
+ import os, shutil, threading
+
+ router = APIRouter()
+
+ @router.post("/reload_config")
+ async def reload_config(request: Request):
+     body = await request.json()
+     project_name = body.get("project_name")
+     new_config_data = body.get("service_config")
+
+     if not project_name or not new_config_data:
+         return {"error": "project_name ve service_config gereklidir."}
+
+     def background_reload():
+         try:
+             current_project = service_config.projects.get(project_name)
+             incoming_project = new_config_data.get("projects", {}).get(project_name)
+
+             if not incoming_project:
+                 log(f"❌ '{project_name}' yeni config içinde bulunamadı, işlem durduruldu.")
+                 return
+
+             project_path = f"/data/projects/{project_name}"
+             temp_path = f"/data/projects/{project_name}_temp"
+
+             if os.path.exists(temp_path):
+                 shutil.rmtree(temp_path)
+             os.makedirs(temp_path, exist_ok=True)
+
+             llm_config = incoming_project["llm"]
+
+             temp_instance = LLMModel()
+
+             if current_project is None:
+                 log(f"🆕 Yeni proje '{project_name}' tespit edildi, yükleme başlatılıyor...")
+                 temp_instance.setup(service_config, llm_config, temp_path)
+
+                 if os.path.exists(project_path):
+                     shutil.rmtree(project_path)
+                 shutil.copytree(temp_path, project_path)
+
+                 llm_models[project_name] = temp_instance
+                 service_config.projects[project_name] = incoming_project
+
+                 log(f"✅ Yeni proje '{project_name}' başarıyla yüklendi ve belleğe alındı.")
+                 return
+
+             if current_project == incoming_project:
+                 log(f"ℹ️ '{project_name}' için değişiklik bulunamadı, işlem atlandı.")
+                 return
+
+             log(f"🔄 '{project_name}' güncellemesi tespit edildi, güncelleme başlatılıyor...")
+
+             if current_project["llm"]["model_base"] != llm_config["model_base"]:
+                 temp_instance.setup(service_config, llm_config, temp_path)
+             else:
+                 temp_instance.model = llm_models[project_name].model
+                 temp_instance.tokenizer = llm_models[project_name].tokenizer
+
+             if os.path.exists(project_path):
+                 shutil.rmtree(project_path)
+             shutil.copytree(temp_path, project_path)
+
+             llm_models[project_name] = temp_instance
+             service_config.projects[project_name] = incoming_project
+
+             log(f"✅ '{project_name}' güncellemesi tamamlandı ve belleğe alındı.")
+
+         except Exception as e:
+             log(f"❌ reload_config background hatası: {e}")
+
+     threading.Thread(target=background_reload, daemon=True).start()
+
+     return {
+         "status": "accepted",
+         "message": f"'{project_name}' için güncelleme arka planda başlatıldı. İşlem loglardan takip edilebilir."
+     }
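
For reference, a minimal client-side sketch of how the updated /reload_config endpoint could be exercised. The base URL, port, project name, and config values below are illustrative assumptions, not part of the commit; the handler only checks that project_name and service_config are present, and whatever ends up under projects[<project_name>]["llm"] must contain the keys that LLMModel.setup and the model_base comparison expect.

    # Hypothetical call to the /reload_config endpoint.
    # URL, port, and all payload values are made-up placeholders.
    import requests

    payload = {
        "project_name": "demo_project",                 # assumed project name
        "service_config": {
            "projects": {
                "demo_project": {
                    "llm": {"model_base": "some/base-model"}  # minimal assumed llm section
                }
            }
        }
    }

    resp = requests.post("http://localhost:7860/reload_config", json=payload)
    print(resp.json())  # {"status": "accepted", "message": "..."} is returned immediately

Because the reload runs in a daemon thread, the "accepted" response only confirms that the work was scheduled; whether the project was actually loaded or updated has to be followed in the service logs, as the returned message notes.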