ciyidogan committed on
Commit
d379bc3
·
verified ·
1 Parent(s): bb3c450

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -49
app.py CHANGED
@@ -1,49 +1,50 @@
1
- from fastapi import FastAPI
2
- from controllers import chat_controller, test_controller, admin_controller, health_controller
3
- from core import service_config, session_store, llm_models, INTENT_MODELS
4
- from llm_model import LLMModel
5
- from log import log
6
- from transformers import AutoTokenizer, AutoModelForSequenceClassification
7
- import json
8
- import os
9
-
10
- app = FastAPI()
11
-
12
- app.include_router(health_controller.router)
13
- app.include_router(chat_controller.router)
14
- app.include_router(test_controller.router)
15
- app.include_router(admin_controller.router)
16
-
17
- def load_intent_model_on_startup(project_name, model_path):
18
- log(f"🔧 Intent modeli yükleniyor: {project_name} → {model_path}")
19
- try:
20
- tokenizer = AutoTokenizer.from_pretrained(model_path)
21
- model = AutoModelForSequenceClassification.from_pretrained(model_path)
22
- with open(os.path.join(model_path, "label2id.json")) as f:
23
- label2id = json.load(f)
24
-
25
- INTENT_MODELS[project_name] = {
26
- "model": model,
27
- "tokenizer": tokenizer,
28
- "label2id": label2id
29
- }
30
- log(f"✅ Intent modeli yüklendi: {project_name}")
31
- except Exception as e:
32
- log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")
33
-
34
- if __name__ == "__main__":
35
- log("🌐 Servis başlatılıyor...")
36
- service_config.load(is_reload=False)
37
- for project_name in service_config.projects:
38
- llm_config = service_config.get_project_llm_config(project_name)
39
- model_instance = LLMModel()
40
- model_instance.setup(service_config, llm_config)
41
- llm_models[project_name] = model_instance
42
- log(f"✅ '{project_name}' için LLM modeli yüklendi.")
43
-
44
- intent_model_path = llm_config.get("intent_model_path")
45
- if intent_model_path:
46
- load_intent_model_on_startup(project_name, intent_model_path)
47
-
48
- import uvicorn
49
- uvicorn.run(app, host="0.0.0.0", port=7860)
 
 
1
+ from fastapi import FastAPI
2
+ from controllers import chat_controller, test_controller, admin_controller, health_controller
3
+ from core import service_config, session_store, llm_models, INTENT_MODELS
4
+ from llm_model import LLMModel
5
+ from log import log
6
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification
7
+ import json
8
+ import os
9
+
10
+ app = FastAPI()
11
+
12
+ app.include_router(health_controller.router)
13
+ app.include_router(chat_controller.router)
14
+ app.include_router(test_controller.router)
15
+ app.include_router(admin_controller.router)
16
+
17
+ def load_intent_model_on_startup(project_name, model_path):
18
+ log(f"🔧 Intent modeli yükleniyor: {project_name} → {model_path}")
19
+ try:
20
+ tokenizer = AutoTokenizer.from_pretrained(model_path)
21
+ model = AutoModelForSequenceClassification.from_pretrained(model_path)
22
+ with open(os.path.join(model_path, "label2id.json")) as f:
23
+ label2id = json.load(f)
24
+
25
+ INTENT_MODELS[project_name] = {
26
+ "model": model,
27
+ "tokenizer": tokenizer,
28
+ "label2id": label2id
29
+ }
30
+ log(f"✅ Intent modeli yüklendi: {project_name}")
31
+ except Exception as e:
32
+ log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")
33
+
34
+ # Bu blok artık dışarıda çalışacak, hem __main__ hem uvicorn için
35
+ log("🌐 Servis başlatılıyor...")
36
+ service_config.load(is_reload=False)
37
+ for project_name in service_config.projects:
38
+ llm_config = service_config.get_project_llm_config(project_name)
39
+ model_instance = LLMModel()
40
+ model_instance.setup(service_config, llm_config)
41
+ llm_models[project_name] = model_instance
42
+ log(f"✅ '{project_name}' için LLM modeli yüklendi.")
43
+
44
+ intent_model_path = llm_config.get("intent_model_path")
45
+ if intent_model_path:
46
+ load_intent_model_on_startup(project_name, intent_model_path)
47
+
48
+ if __name__ == "__main__":
49
+ import uvicorn
50
+ uvicorn.run(app, host="0.0.0.0", port=7860)