Update app.py
app.py CHANGED

@@ -1,10 +1,8 @@
 from fastapi import FastAPI
 from controllers import chat_controller, test_controller, admin_controller, health_controller
-from core import service_config, session_store, llm_models
+from core import service_config, session_store, llm_models
 from llm_model import LLMModel
 from log import log
-from transformers import AutoTokenizer, AutoModelForSequenceClassification
-import json
 import os
 import warnings
 
@@ -20,22 +18,16 @@ app.include_router(admin_controller.router)
 
 BASE_PROJECTS_DIR = "/data/projects"
 
-def
-…
-            "tokenizer": tokenizer,
-            "label2id": label2id
-        }
-        log(f"✅ Intent modeli yüklendi: {project_name}")
-    except Exception as e:
-        log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")
+def load_project(project_name, config, project_path):
+    llm_config = config.get_project_llm_config(project_name)
+    model_instance = LLMModel()
+    model_instance.setup(config, llm_config, project_path)
+
+    intent_model_path = os.path.join(project_path, "intent", "trained_model")
+    if os.path.exists(intent_model_path):
+        model_instance.load_intent_model(intent_model_path)
+
+    return model_instance
 
 log("🌐 Servis başlatılıyor...")
 service_config.load(is_reload=False)
@@ -47,16 +39,10 @@ for project_name in service_config.projects:
     os.makedirs(os.path.join(project_path, "llm", "fine_tune"), exist_ok=True)
     os.makedirs(os.path.join(project_path, "intent", "trained_model"), exist_ok=True)
 
-    …
-    model_instance = LLMModel()
-    model_instance.setup(service_config, llm_config, project_path)
+    model_instance = load_project(project_name, service_config, project_path)
     llm_models[project_name] = model_instance
-    log(f"✅ '{project_name}' için
-
-    intent_model_path = os.path.join(project_path, "intent", "trained_model")
-    if os.path.exists(intent_model_path):
-        load_intent_model_on_startup(project_name, intent_model_path)
+    log(f"✅ '{project_name}' için tüm modeller yüklenip belleğe alındı.")
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=7860)
+    uvicorn.run(app, host="0.0.0.0", port=7860)
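The removed transformers import and the new model_instance.load_intent_model(intent_model_path) call suggest that intent-model loading now lives inside LLMModel (in llm_model.py, which this commit does not touch). As a rough, hypothetical sketch only, based on the "tokenizer"/"label2id" state the old startup code kept, such a method could look like the following; the attribute names and label2id handling are assumptions, not code from this repository.

from transformers import AutoModelForSequenceClassification, AutoTokenizer

class LLMModel:
    def load_intent_model(self, model_path):
        # Hypothetical sketch: load the fine-tuned intent classifier and its
        # tokenizer from the project's intent/trained_model directory.
        self.intent_tokenizer = AutoTokenizer.from_pretrained(model_path)
        self.intent_model = AutoModelForSequenceClassification.from_pretrained(model_path)
        # Keep the label mapping for decoding classifier outputs later,
        # mirroring the "label2id" entry the old startup code stored.
        self.intent_label2id = self.intent_model.config.label2id

Whatever the real implementation looks like, the net effect of the commit is that app.py only orchestrates startup: load_project builds one fully initialised LLMModel per project, the loop caches it in llm_models, and uvicorn serves the app on port 7860, the port Hugging Face Spaces expects.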