from fastapi import APIRouter, Request
from core import service_config, llm_models
from llm_model import LLMModel
from log import log
import os, shutil, threading

router = APIRouter()

@router.post("/reload_config")
async def reload_config(request: Request):
    body = await request.json()
    project_name = body.get("project_name")
    new_config_data = body.get("service_config")

    if not project_name or not new_config_data:
        return {"error": "project_name and service_config are required."}

    def background_reload():
        try:
            current_project = service_config.projects.get(project_name)
            incoming_project = new_config_data.get("projects", {}).get(project_name)

            if not incoming_project:
                log(f"❌ '{project_name}' was not found in the new config, aborting.")
                return

            project_path = f"/data/projects/{project_name}"
            temp_path = f"/data/projects/{project_name}_temp"

            # Prepare a clean temporary directory for the (re)load
            if os.path.exists(temp_path):
                shutil.rmtree(temp_path)
            os.makedirs(temp_path, exist_ok=True)

            llm_config = incoming_project["llm"]
            temp_instance = LLMModel()

            # 🆕 A new project is being added
            if current_project is None:
                log(f"🆕 New project '{project_name}' detected, starting load...")
                temp_instance.setup(service_config, llm_config, temp_path)
                if os.path.exists(project_path):
                    shutil.rmtree(project_path)
                shutil.copytree(temp_path, project_path)
                llm_models[project_name] = temp_instance
                service_config.projects[project_name] = incoming_project
                log(f"✅ New project '{project_name}' loaded and placed in memory.")
                return

            # 🔄 Update the existing project only if something changed
            if current_project == incoming_project:
                log(f"ℹ️ No changes found for '{project_name}', skipping.")
                return

            log(f"🔄 Update detected for '{project_name}', starting update...")

            # Reload only if the base model changed; otherwise reuse the model already in memory
            if current_project["llm"]["model_base"] != llm_config["model_base"]:
                temp_instance.setup(service_config, llm_config, temp_path)
            else:
                temp_instance.model = llm_models[project_name].model
                temp_instance.tokenizer = llm_models[project_name].tokenizer

            # Replace the project directory with the freshly prepared temp directory
            if os.path.exists(project_path):
                shutil.rmtree(project_path)
            shutil.copytree(temp_path, project_path)

            llm_models[project_name] = temp_instance
            service_config.projects[project_name] = incoming_project
            log(f"✅ Update for '{project_name}' completed and placed in memory.")
        except Exception as e:
            log(f"❌ reload_config background error: {e}")

    threading.Thread(target=background_reload, daemon=True).start()

    return {
        "status": "accepted",
        "message": f"Update for '{project_name}' started in the background. Progress can be followed in the logs."
    }
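
# Illustrative request sketch for this endpoint. The field names below come from
# the handler above; the project name "demo-project" and the model_base value are
# hypothetical placeholders, and the host/path depend on how the FastAPI app that
# mounts this router is served.
#
#   POST /reload_config
#   {
#     "project_name": "demo-project",
#     "service_config": {
#       "projects": {
#         "demo-project": {
#           "llm": { "model_base": "some-base-model" }
#         }
#       }
#     }
#   }
#
# The endpoint returns {"status": "accepted", ...} immediately; the actual load or
# update runs in the daemon thread, with progress reported through log().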