# NOTE(review): "Spaces: Paused" banner below is Hugging Face Spaces page
# residue captured during extraction — not part of the application source.
from fastapi import FastAPI
from controllers import chat_controller, test_controller, admin_controller, health_controller
from core import service_config, session_store, llm_models, INTENT_MODELS
from llm_model import LLMModel
from log import log
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import json
import os
import warnings

# Silence FutureWarnings
warnings.simplefilter(action='ignore', category=FutureWarning)

# FastAPI application with all feature routers mounted at import time.
app = FastAPI()
app.include_router(health_controller.router)
app.include_router(chat_controller.router)
app.include_router(test_controller.router)
app.include_router(admin_controller.router)

# Root directory holding the per-project model/data subtrees created below.
BASE_PROJECTS_DIR = "/data/projects"
def load_intent_model_on_startup(project_name, model_path):
    """Load a trained intent-classification model for one project.

    Reads the tokenizer, the sequence-classification model, and the
    ``label2id.json`` mapping from *model_path* and registers them in the
    global ``INTENT_MODELS`` registry under *project_name*.

    Args:
        project_name: Key under which the loaded artifacts are registered.
        model_path: Directory containing the saved HF model, tokenizer,
            and ``label2id.json``.

    Failures are logged and swallowed so one broken project does not
    prevent the whole service from starting.
    """
    log(f"🔧 Intent modeli yükleniyor: {project_name} → {model_path}")
    try:
        tokenizer = AutoTokenizer.from_pretrained(model_path)
        model = AutoModelForSequenceClassification.from_pretrained(model_path)
        # Explicit encoding: label names may contain non-ASCII characters,
        # and the platform default encoding is not guaranteed to be UTF-8.
        with open(os.path.join(model_path, "label2id.json"), encoding="utf-8") as f:
            label2id = json.load(f)
        INTENT_MODELS[project_name] = {
            "model": model,
            "tokenizer": tokenizer,
            "label2id": label2id
        }
        log(f"✅ Intent modeli yüklendi: {project_name}")
    except Exception as e:
        # Best-effort startup: report the failure and continue with the
        # remaining projects instead of crashing the service.
        log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")
# Startup loading: read the service config and pre-load every project's
# models at import time, so the first request does not pay the load cost.
log("🌐 Servis başlatılıyor...")
service_config.load(is_reload=False)
for project_name in service_config.projects:
    # Ensure the expected on-disk layout exists for this project.
    project_path = os.path.join(BASE_PROJECTS_DIR, project_name)
    os.makedirs(project_path, exist_ok=True)
    os.makedirs(os.path.join(project_path, "llm", "base_model"), exist_ok=True)
    os.makedirs(os.path.join(project_path, "llm", "fine_tune"), exist_ok=True)
    os.makedirs(os.path.join(project_path, "intent", "trained_model"), exist_ok=True)
    # Build and register the project's LLM in the global registry.
    llm_config = service_config.get_project_llm_config(project_name)
    model_instance = LLMModel()
    model_instance.setup(service_config, llm_config, project_path)
    llm_models[project_name] = model_instance
    log(f"✅ '{project_name}' için LLM modeli yüklendi.")
    # Load the intent classifier only when a trained model directory is
    # present (the directory was just created above, so this checks for
    # previously trained artifacts inside it — TODO confirm: os.path.exists
    # is always True here; os.listdir-style emptiness check may be intended).
    intent_model_path = os.path.join(project_path, "intent", "trained_model")
    if os.path.exists(intent_model_path):
        load_intent_model_on_startup(project_name, intent_model_path)
if __name__ == "__main__":
    # Direct-run entry point; deferred import keeps uvicorn optional when
    # the app is served by an external ASGI server instead.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)