from fastapi import FastAPI
from controllers import chat_controller, test_controller, admin_controller, health_controller
from core import service_config, session_store, llm_models
from llm_model import LLMModel
from intent_utils import background_training
from log import log
import os
import warnings
# Silence FutureWarnings
warnings.simplefilter(action='ignore', category=FutureWarning)
app = FastAPI()
app.include_router(health_controller.router)
app.include_router(chat_controller.router)
app.include_router(test_controller.router)
app.include_router(admin_controller.router)
BASE_PROJECTS_DIR = "/data/projects"
def load_project(project_name, config, project_path):
    llm_config = config.get_project_llm_config(project_name)
    model_instance = LLMModel()
    model_instance.setup(config, llm_config, project_path)

    # Path of the trained intent model
    intent_model_path = os.path.join(project_path, "intent", "trained_model")

    # If the intent model directory (or its config.json) is missing → start training
    if not os.path.exists(intent_model_path) or not os.path.isfile(os.path.join(intent_model_path, "config.json")):
        log(f"🛠 Intent model not found, starting training: {intent_model_path}")
        intents = config.get_project_intents(project_name)
        os.makedirs(intent_model_path, exist_ok=True)
        background_training(
            project_name,
            intents,
            llm_config["intent_model_id"],
            intent_model_path,
            llm_config["train_confidence_treshold"]
        )

    # Load the intent model after training
    if os.path.exists(intent_model_path) and os.path.isfile(os.path.join(intent_model_path, "config.json")):
        model_instance.load_intent_model(intent_model_path)
    else:
        log(f"⚠️ Intent model could not be loaded: {intent_model_path}, skipping load.")

    return model_instance
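
# Reference only: load_project reads exactly two keys from the dict returned by
# config.get_project_llm_config(). A minimal sketch of that shape is shown below;
# the key names mirror the lookups above, while the values are placeholder
# assumptions, not the real service configuration.
EXAMPLE_PROJECT_LLM_CONFIG = {
    "intent_model_id": "org/example-intent-model",   # base model id handed to background_training (placeholder)
    "train_confidence_treshold": 0.7,                # confidence threshold forwarded to training (placeholder)
}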
log("🌐 Servis başlatılıyor...")
service_config.load(is_reload=False)
for project_name in service_config.projects:
project_path = os.path.join(BASE_PROJECTS_DIR, project_name)
os.makedirs(project_path, exist_ok=True)
os.makedirs(os.path.join(project_path, "llm", "base_model"), exist_ok=True)
os.makedirs(os.path.join(project_path, "llm", "fine_tune"), exist_ok=True)
os.makedirs(os.path.join(project_path, "intent", "trained_model"), exist_ok=True)
model_instance = load_project(project_name, service_config, project_path)
llm_models[project_name] = model_instance
log(f"✅ '{project_name}' için tüm modeller yüklenip belleğe alındı.")

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)