ciyidogan commited on
Commit
d3a3f62
·
verified ·
1 Parent(s): c13ff1c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -21
app.py CHANGED
@@ -2,10 +2,9 @@ from fastapi import FastAPI
2
  from controllers import chat_controller, test_controller, admin_controller, health_controller
3
  from core import service_config, session_store, llm_models, INTENT_MODELS
4
  from llm_model import LLMModel
5
- from log import log
6
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
7
- import json
8
- import os
9
 
10
  app = FastAPI()
11
 
@@ -14,26 +13,10 @@ app.include_router(chat_controller.router)
14
  app.include_router(test_controller.router)
15
  app.include_router(admin_controller.router)
16
 
17
- def load_intent_model_on_startup(project_name, model_path):
18
- log(f"🔧 Intent modeli yükleniyor: {project_name} → {model_path}")
19
- try:
20
- tokenizer = AutoTokenizer.from_pretrained(model_path)
21
- model = AutoModelForSequenceClassification.from_pretrained(model_path)
22
- with open(os.path.join(model_path, "label2id.json")) as f:
23
- label2id = json.load(f)
24
-
25
- INTENT_MODELS[project_name] = {
26
- "model": model,
27
- "tokenizer": tokenizer,
28
- "label2id": label2id
29
- }
30
- log(f"✅ Intent modeli yüklendi: {project_name}")
31
- except Exception as e:
32
- log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")
33
-
34
  if __name__ == "__main__":
35
  log("🌐 Servis başlatılıyor...")
36
  service_config.load(is_reload=False)
 
37
  for project_name in service_config.projects:
38
  llm_config = service_config.get_project_llm_config(project_name)
39
  model_instance = LLMModel()
@@ -41,9 +24,25 @@ if __name__ == "__main__":
41
  llm_models[project_name] = model_instance
42
  log(f"✅ '{project_name}' için LLM modeli yüklendi.")
43
 
 
44
  intent_model_path = llm_config.get("intent_model_path")
45
  if intent_model_path:
46
- load_intent_model_on_startup(project_name, intent_model_path)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
  import uvicorn
49
  uvicorn.run(app, host="0.0.0.0", port=7860)
 
 
2
  from controllers import chat_controller, test_controller, admin_controller, health_controller
3
  from core import service_config, session_store, llm_models, INTENT_MODELS
4
  from llm_model import LLMModel
 
5
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
6
+ import json, os
7
+ from log import log
8
 
9
  app = FastAPI()
10
 
 
13
  app.include_router(test_controller.router)
14
  app.include_router(admin_controller.router)
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
def _load_intent_model(project_name, intent_model_path):
    """Load one project's intent-classification model into INTENT_MODELS.

    Loads the HuggingFace tokenizer, the sequence-classification model and
    the ``label2id.json`` mapping from *intent_model_path*, then registers
    them under *project_name*.  Failures are logged and swallowed so that
    one broken intent model does not abort startup of the whole service.
    """
    try:
        log(f"🔧 Intent modeli yükleniyor: {project_name} → {intent_model_path}")
        tokenizer = AutoTokenizer.from_pretrained(intent_model_path)
        model = AutoModelForSequenceClassification.from_pretrained(intent_model_path)
        # label2id.json is JSON (UTF-8 by spec); be explicit so the platform
        # default encoding (e.g. cp1252 on Windows) cannot mis-decode it.
        with open(os.path.join(intent_model_path, "label2id.json"), encoding="utf-8") as f:
            label2id = json.load(f)

        INTENT_MODELS[project_name] = {
            "model": model,
            "tokenizer": tokenizer,
            "label2id": label2id
        }
        log(f"✅ Intent modeli yüklendi: {project_name}")
    except Exception as e:
        # Deliberate best-effort: log the failure and continue with the
        # remaining projects instead of crashing startup.
        log(f"❌ Intent modeli yüklenemedi: {project_name}, Hata: {e}")


if __name__ == "__main__":
    log("🌐 Servis başlatılıyor...")
    service_config.load(is_reload=False)

    # Load one LLM instance (and, when configured, one intent model)
    # per project declared in the service configuration.
    for project_name in service_config.projects:
        llm_config = service_config.get_project_llm_config(project_name)
        model_instance = LLMModel()
        # NOTE(review): the diff view omits one line here (new-file line 23,
        # presumably the model setup/load call on model_instance) — confirm
        # against the full file before relying on this reconstruction.
        llm_models[project_name] = model_instance
        log(f"✅ '{project_name}' için LLM modeli yüklendi.")

        # ✅ Intent modeli yükleme eklendi
        intent_model_path = llm_config.get("intent_model_path")
        if intent_model_path:
            _load_intent_model(project_name, intent_model_path)

    # Imported lazily: uvicorn is only needed when run as a script,
    # not when app.py is imported by another process.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)