ciyidogan commited on
Commit
2ebef02
Β·
verified Β·
1 Parent(s): 1c637bb

Create llm_factory.py

Browse files
Files changed (1) hide show
  1. llm_factory.py +110 -0
llm_factory.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ LLM Provider Factory for Flare
3
+ """
4
+ import os
5
+ from typing import Optional
6
+ from dotenv import load_dotenv
7
+
8
+ from llm_interface import LLMInterface, SparkLLM, GPT4oLLM
9
+ from config_provider import ConfigProvider
10
+ from utils import log
11
+
12
class LLMFactory:
    """Factory class to create appropriate LLM provider based on llm_provider config"""

    @staticmethod
    def create_provider() -> LLMInterface:
        """Create and return the LLM provider selected by global config.

        Raises:
            ValueError: If the configured provider is unknown/unsupported,
                or if it requires an API key that could not be resolved.
        """
        cfg = ConfigProvider.get()
        llm_provider = cfg.global_config.llm_provider

        log(f"🏭 Creating LLM provider: {llm_provider}")

        # Validate that the provider name maps to a known provider config.
        provider_config = cfg.global_config.get_llm_provider_config()
        if not provider_config:
            raise ValueError(f"Unknown LLM provider: {llm_provider}")

        # Resolve the API key (encrypted config first, then environment).
        # May be None when the provider does not require a key.
        api_key = LLMFactory._get_api_key()
        if not api_key and provider_config.requires_api_key:
            raise ValueError(f"API key required for {llm_provider} but not configured")

        # Dispatch to the concrete provider constructor.
        if llm_provider == "spark":
            return LLMFactory._create_spark_provider(api_key)
        elif llm_provider in ("gpt4o", "gpt4o-mini"):
            return LLMFactory._create_gpt_provider(llm_provider, api_key)
        else:
            raise ValueError(f"Unsupported LLM provider: {llm_provider}")

    @staticmethod
    def _create_spark_provider(api_key: Optional[str]) -> SparkLLM:
        """Create a Spark LLM provider.

        Args:
            api_key: Spark token. May be None when the provider config
                does not require an API key.

        Raises:
            ValueError: If no ``llm_provider_endpoint`` is configured.
        """
        cfg = ConfigProvider.get()

        endpoint = cfg.global_config.llm_provider_endpoint
        if not endpoint:
            raise ValueError("Spark requires llm_provider_endpoint to be configured")

        log("πŸš€ Creating SparkLLM provider")
        log(f"πŸ“ Endpoint: {endpoint}")

        # Determine work mode for Spark (backward compatibility):
        # default to "cloud" unless config explicitly says otherwise.
        work_mode = "cloud"
        if not cfg.global_config.is_cloud_mode():
            work_mode = "on-premise"

        return SparkLLM(
            spark_endpoint=str(endpoint),
            spark_token=api_key,
            work_mode=work_mode
        )

    @staticmethod
    def _create_gpt_provider(model_type: str, api_key: Optional[str]) -> GPT4oLLM:
        """Create a GPT-4o LLM provider.

        Args:
            model_type: Either ``"gpt4o"`` or ``"gpt4o-mini"``.
            api_key: OpenAI API key resolved by :meth:`_get_api_key`.
        """
        # Map the internal provider name to the concrete OpenAI model id.
        model = "gpt-4o-mini" if model_type == "gpt4o-mini" else "gpt-4o"

        log(f"πŸ€– Creating GPT4oLLM provider with model: {model}")

        return GPT4oLLM(
            api_key=api_key,
            model=model
        )

    @staticmethod
    def _get_api_key() -> Optional[str]:
        """Get the API key from config or environment.

        Lookup order:
            1. Decrypted key stored in the (encrypted) config.
            2. Provider-specific environment variable — process environment
               in cloud mode, a local ``.env`` file otherwise.

        Returns:
            The API key, or None if none could be found.
        """
        cfg = ConfigProvider.get()

        # First check encrypted config
        api_key = cfg.global_config.get_plain_api_key()
        if api_key:
            log("πŸ”‘ Using decrypted API key from config")
            return api_key

        # Then check environment based on provider
        llm_provider = cfg.global_config.llm_provider

        env_var_map = {
            "spark": "SPARK_TOKEN",
            "gpt4o": "OPENAI_API_KEY",
            "gpt4o-mini": "OPENAI_API_KEY",
            # Add more mappings as needed
        }

        env_var = env_var_map.get(llm_provider)
        if env_var:
            if cfg.global_config.is_cloud_mode():
                # Cloud deployments take secrets from the process environment.
                api_key = os.environ.get(env_var)
                if api_key:
                    log(f"πŸ”‘ Using {env_var} from environment")
            else:
                # On-premise deployments read a local .env file.
                load_dotenv()
                api_key = os.getenv(env_var)
                if api_key:
                    log(f"πŸ”‘ Using {env_var} from .env file")

        return api_key