File size: 2,444 Bytes
d379bc3
 
 
 
 
 
 
 
7705b09
 
 
 
d379bc3
 
 
 
 
 
 
 
c2e0e06
 
d379bc3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c2e0e06
d379bc3
 
c2e0e06
d379bc3
c2e0e06
 
 
 
 
 
d379bc3
 
c2e0e06
d379bc3
 
 
c2e0e06
 
d379bc3
 
 
 
c2e0e06
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
from fastapi import FastAPI
from controllers import chat_controller, test_controller, admin_controller, health_controller
from core import service_config, session_store, llm_models, INTENT_MODELS
from llm_model import LLMModel
from log import log
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import json
import os
import warnings

# Silence FutureWarnings (e.g. emitted by transformers) to keep startup logs readable.
warnings.simplefilter(action='ignore', category=FutureWarning)

# FastAPI application instance; the routers below define the HTTP surface.
app = FastAPI()

app.include_router(health_controller.router)
app.include_router(chat_controller.router)
app.include_router(test_controller.router)
app.include_router(admin_controller.router)

# Root directory holding one subdirectory per project with its model artifacts.
# NOTE(review): presumably a mounted volume — confirm against deployment config.
BASE_PROJECTS_DIR = "/data/projects"

def load_intent_model_on_startup(project_name, model_path):
    """Load one project's trained intent-classification model into INTENT_MODELS.

    Reads the tokenizer, the sequence-classification model and the
    ``label2id.json`` mapping from *model_path* and registers them under
    *project_name*. Any failure is logged and swallowed so that one broken
    model does not prevent the whole service from starting.

    Args:
        project_name: Key under which the model is registered in INTENT_MODELS.
        model_path: Directory containing the saved model, tokenizer and
            ``label2id.json``.
    """
    # Bug fix: the original f-string ran name and path together with no
    # separator ("{project_name}{model_path}"), producing a garbled log line.
    log(f"🔧 Intent modeli yükleniyor: {project_name} ({model_path})")
    try:
        tokenizer = AutoTokenizer.from_pretrained(model_path)
        model = AutoModelForSequenceClassification.from_pretrained(model_path)
        # Explicit encoding: label labels may contain non-ASCII characters and
        # the platform default encoding is not guaranteed to be UTF-8.
        with open(os.path.join(model_path, "label2id.json"), encoding="utf-8") as f:
            label2id = json.load(f)

        INTENT_MODELS[project_name] = {
            "model": model,
            "tokenizer": tokenizer,
            "label2id": label2id,
        }
        log(f"✅ Intent modeli yüklendi: {project_name}")
    except Exception as e:
        # Best-effort by design: log and continue with the remaining projects.
        log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")

# ---- Startup: prepare directories and load per-project models ----
log("🌐 Servis başlatılıyor...")
service_config.load(is_reload=False)

for project_name in service_config.projects:
    project_path = os.path.join(BASE_PROJECTS_DIR, project_name)
    # Ensure the expected directory layout exists for every configured project.
    for parts in ((), ("llm", "base_model"), ("llm", "fine_tune"), ("intent", "trained_model")):
        os.makedirs(os.path.join(project_path, *parts), exist_ok=True)

    # The LLM model is mandatory and set up for every project.
    llm_config = service_config.get_project_llm_config(project_name)
    model_instance = LLMModel()
    model_instance.setup(service_config, llm_config, project_path)
    llm_models[project_name] = model_instance
    log(f"✅ '{project_name}' için LLM modeli yüklendi.")

    # The intent model is optional. Bug fix: the makedirs above guarantees the
    # directory itself always exists, so the original os.path.exists(dir) guard
    # was always true and the loader ran (and logged an error) even for
    # projects with no trained model. Gate on an actual training artifact —
    # label2id.json, which the loader itself reads — instead.
    intent_model_path = os.path.join(project_path, "intent", "trained_model")
    if os.path.isfile(os.path.join(intent_model_path, "label2id.json")):
        load_intent_model_on_startup(project_name, intent_model_path)

if __name__ == "__main__":
    # Local/dev entry point: serve the app directly with uvicorn.
    import uvicorn

    bind_host = "0.0.0.0"
    bind_port = 7860
    uvicorn.run(app, host=bind_host, port=bind_port)