 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import FastAPI
from controllers import chat_controller, test_controller, admin_controller, health_controller
from core import service_config, session_store, llm_models
from llm_model import LLMModel
from log import log
import os
import warnings

# Suppress FutureWarning noise emitted by third-party dependencies at import time.
warnings.simplefilter(action='ignore', category=FutureWarning)

# ASGI application instance served by uvicorn (see the __main__ guard below).
app = FastAPI()

# Register all controller route groups on the application.
app.include_router(health_controller.router)
app.include_router(chat_controller.router)
app.include_router(test_controller.router)
app.include_router(admin_controller.router)

# Root directory under which each project's on-disk model artifacts live.
BASE_PROJECTS_DIR = "/data/projects"

def load_project(project_name, config, project_path):
    """Build and initialize the LLM model instance for one project.

    Args:
        project_name: Key identifying the project in the service configuration.
        config: The loaded service configuration object.
        project_path: Filesystem directory holding the project's model data.

    Returns:
        A set-up ``LLMModel`` ready to be registered in the model registry.
    """
    project_llm_config = config.get_project_llm_config(project_name)
    model = LLMModel()
    model.setup(config, project_llm_config, project_path)
    return model

log("🌐 Servis başlatılıyor...")
service_config.load(is_reload=False)

# For every configured project: make sure its on-disk layout exists, then
# load its model into memory and register it in the global model registry.
for project_name in service_config.projects:
    proj_dir = os.path.join(BASE_PROJECTS_DIR, project_name)
    # os.makedirs creates intermediate directories, so building the two leaf
    # subdirectories also guarantees proj_dir and proj_dir/llm exist.
    for leaf in ("base_model", "fine_tune"):
        os.makedirs(os.path.join(proj_dir, "llm", leaf), exist_ok=True)

    llm_models[project_name] = load_project(project_name, service_config, proj_dir)
    log(f"✅ '{project_name}' için tüm modeller yüklenip belleğe alındı.")

if __name__ == "__main__":
    # Allow running this module directly with a local uvicorn server.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)