# NOTE(review): removed editor/extraction residue ("Spaces: / Paused / Paused")
# that was prepended to this module — it is not Python and not part of the code.
# Standard library
import json
import os
import shutil
import threading

# Third-party
from fastapi import APIRouter, Request

# Project-local
from core import service_config, llm_models
from intent_utils import background_training
from llm_model import LLMModel
from log import log

# Shared router; endpoints in this module are expected to be registered on it.
router = APIRouter()
async def reload_config(request: Request):
    """Accept an updated service config for one project and apply it asynchronously.

    Expects a JSON body with keys ``project_name`` and ``service_config``.
    Responds immediately with an "accepted" payload; the heavy work (base-model
    setup, intent training, directory swap, in-memory registry update) runs in
    a daemon thread and is observable only through ``log()`` output.

    NOTE(review): no ``@router.post(...)`` decorator is visible here —
    presumably this coroutine is registered elsewhere (e.g. via
    ``router.add_api_route``); confirm before relying on this file alone.
    """
    body = await request.json()
    project_name = body.get("project_name")
    new_config_data = body.get("service_config")
    if not project_name or not new_config_data:
        # Validation failure; returned as a plain payload (FastAPI will still
        # answer HTTP 200 unless a status code is configured at registration).
        return {"error": "project_name ve service_config gereklidir."}

    def background_reload():
        """Build the project under a temp dir, then swap it into place."""
        try:
            current_project = service_config.projects.get(project_name)
            incoming_project = new_config_data.get("projects", {}).get(project_name)
            if not incoming_project:
                log(f"❌ '{project_name}' yeni config içinde bulunamadı, işlem durduruldu.")
                return

            project_path = f"/data/projects/{project_name}"
            temp_path = f"/data/projects/{project_name}_temp"

            # Always start from a clean temp workspace.
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            os.makedirs(temp_path, exist_ok=True)

            llm_config = incoming_project["llm"]
            intents = incoming_project["intents"]
            temp_instance = LLMModel()

            def train_intents():
                # Train the intent classifier into the temp dir, then load it
                # onto the staged instance.
                intent_model_path = os.path.join(temp_path, "intent", "trained_model")
                background_training(
                    project_name,
                    intents,
                    llm_config["intent_model_id"],
                    intent_model_path,
                    # Key is spelled "treshold" in the config schema — keep as-is.
                    llm_config["train_confidence_treshold"],
                )
                temp_instance.load_intent_model(intent_model_path)

            def deploy():
                # Replace the live project dir with the temp build, then
                # publish the staged instance and config to the in-memory
                # registries. Not atomic w.r.t. concurrent readers — a request
                # hitting llm_models mid-swap sees either old or new instance.
                if os.path.exists(project_path):
                    shutil.rmtree(project_path)
                shutil.copytree(temp_path, project_path)
                llm_models[project_name] = temp_instance
                service_config.projects[project_name] = incoming_project

            # 🆕 New project: full setup + intent training, then deploy.
            if current_project is None:
                log(f"🆕 Yeni proje '{project_name}' tespit edildi, yükleme başlatılıyor...")
                temp_instance.setup(service_config, llm_config, temp_path)
                train_intents()
                deploy()
                log(f"✅ Yeni proje '{project_name}' başarıyla yüklendi ve belleğe alındı.")
                return

            # No diff against the live config → nothing to do.
            if current_project == incoming_project:
                log(f"ℹ️ '{project_name}' için değişiklik bulunamadı, işlem atlandı.")
                return

            log(f"🔄 '{project_name}' güncellemesi tespit edildi, güncelleme başlatılıyor...")

            # Reload the base model only when it changed; otherwise reuse the
            # already-loaded model/tokenizer from the live instance.
            # NOTE(review): in the reuse branch nothing writes base-model files
            # under temp_path before deploy() copies it over project_path —
            # confirm LLMModel tolerates that on subsequent loads.
            if current_project["llm"]["model_base"] != llm_config["model_base"]:
                temp_instance.setup(service_config, llm_config, temp_path)
            else:
                temp_instance.model = llm_models[project_name].model
                temp_instance.tokenizer = llm_models[project_name].tokenizer

            # Retrain intents only when they changed; otherwise reuse the live
            # intent model artifacts.
            if current_project["intents"] != intents:
                train_intents()
            else:
                temp_instance.intent_model = llm_models[project_name].intent_model
                temp_instance.intent_tokenizer = llm_models[project_name].intent_tokenizer
                temp_instance.intent_label2id = llm_models[project_name].intent_label2id

            deploy()
            log(f"✅ '{project_name}' güncellemesi tamamlandı ve belleğe alındı.")
        except Exception as e:
            # Boundary handler: log and swallow so the daemon thread can never
            # take the process down.
            log(f"❌ reload_config background hatası: {e}")

    # Fire-and-forget: the endpoint returns before any work completes.
    threading.Thread(target=background_reload, daemon=True).start()
    return {
        "status": "accepted",
        "message": f"'{project_name}' için güncelleme arka planda başlatıldı. İşlem loglardan takip edilebilir."
    }