ciyidogan committed on
Commit 4c88d20 · verified · 1 Parent(s): c31df77

Update chat_handler.py

Files changed (1)
  1. chat_handler.py +46 -38
chat_handler.py CHANGED
@@ -80,59 +80,67 @@ def setup_llm_provider():
             log("❌ SPARK_TOKEN not found")
             raise ValueError("SPARK_TOKEN not configured")

-        spark_endpoint = str(cfg.global_config.spark_endpoint)
-        llm_provider = SparkLLM(spark_endpoint, spark_token)
-        return
-
-    # Parse internal prompt format: "provider:model"
-    parts = internal_prompt.split(":", 1)
-    if len(parts) != 2:
-        log(f"⚠️ Invalid internal_prompt format: {internal_prompt}, using Spark")
-        spark_token = _get_spark_token()
-        if not spark_token:
-            log("❌ SPARK_TOKEN not found")
-            raise ValueError("SPARK_TOKEN not configured")
+        spark_endpoint = str(cfg.global_config.spark_endpoint).rstrip("/")
+        work_mode = cfg.global_config.work_mode
+
+        log(f"🔌 Initializing SparkLLM: {spark_endpoint}")
+        log(f"🔧 Work mode: {work_mode}")
+
+        llm_provider = SparkLLM(
+            spark_endpoint=spark_endpoint,
+            spark_token=spark_token,
+            work_mode=work_mode
+        )

-        spark_endpoint = str(cfg.global_config.spark_endpoint)
-        llm_provider = SparkLLM(spark_endpoint, spark_token)
+        log("✅ SparkLLM initialized")
         return

-    provider, model = parts[0].lower(), parts[1]
-
-    if provider == "openai":
-        # Get API key from environment
-        api_key = os.getenv("OPENAI_API_KEY")
+    # Check if it's a GPT-4o config
+    if internal_prompt.get("provider") == "gpt-4o":
+        api_key = internal_prompt.get("api_key")
         if not api_key:
-            log("❌ OPENAI_API_KEY not found in environment")
-            # Fallback to Spark
-            spark_token = _get_spark_token()
-            if not spark_token:
-                raise ValueError("Neither OPENAI_API_KEY nor SPARK_TOKEN configured")
-
-            spark_endpoint = str(cfg.global_config.spark_endpoint)
-            llm_provider = SparkLLM(spark_endpoint, spark_token)
-            return
+            if cfg.global_config.is_cloud_mode():
+                api_key = os.environ.get("OPENAI_API_KEY")
+            else:
+                from dotenv import load_dotenv
+                load_dotenv()
+                api_key = os.getenv("OPENAI_API_KEY")
+
+            if not api_key:
+                raise ValueError("OpenAI API key not configured")

-        log(f"🤖 Using OpenAI with model: {model}")
-        llm_provider = GPT4oLLM(api_key, model)
+        llm_provider = GPT4oLLM(
+            api_key=api_key,
+            model=internal_prompt.get("model", "gpt-4o"),
+            max_tokens=internal_prompt.get("max_tokens", 4096),
+            temperature=internal_prompt.get("temperature", 0.7)
+        )
+        log("✅ GPT-4o LLM initialized")
     else:
-        log(f"⚠️ Unknown provider: {provider}, using Spark")
+        # Default to Spark
         spark_token = _get_spark_token()
         if not spark_token:
             raise ValueError("SPARK_TOKEN not configured")

-        spark_endpoint = str(cfg.global_config.spark_endpoint)
-        llm_provider = SparkLLM(spark_endpoint, spark_token)
+        spark_endpoint = str(cfg.global_config.spark_endpoint).rstrip("/")
+        work_mode = cfg.global_config.work_mode
+
+        llm_provider = SparkLLM(
+            spark_endpoint=spark_endpoint,
+            spark_token=spark_token,
+            work_mode=work_mode
+        )
+        log("✅ SparkLLM initialized (via internal_prompt)")

 def _get_spark_token() -> Optional[str]:
-    """Get Spark token based on work_mode"""
+    """Get Spark token based on work mode"""
     cfg = ConfigProvider.get()

     if cfg.global_config.is_cloud_mode():
-        # Cloud mode - use HuggingFace Secrets
-        token = os.getenv("SPARK_TOKEN")
-        if not token:
-            log("❌ SPARK_TOKEN not found in HuggingFace Secrets!")
+        # Cloud mode - use HuggingFace secrets
+        token = os.environ.get("SPARK_TOKEN")
+        if token:
+            log("🔑 Using SPARK_TOKEN from environment")
         return token
     else:
         # On-premise mode - use .env file
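
Note: the new GPT-4o branch reads its settings from the internal_prompt dict via .get(), falling back to the OPENAI_API_KEY environment variable (or the .env file on-premise) when no key is supplied. A minimal sketch of the dict shape this branch appears to expect, inferred only from the accessor calls in the diff above (any field or default not shown there is an assumption, not part of a confirmed schema):

    # Hypothetical internal_prompt config, inferred from the internal_prompt.get(...) calls above
    internal_prompt = {
        "provider": "gpt-4o",     # any other value falls through to the Spark branch
        "api_key": None,          # optional; falls back to OPENAI_API_KEY when missing
        "model": "gpt-4o",        # default used if omitted
        "max_tokens": 4096,       # default used if omitted
        "temperature": 0.7,       # default used if omitted
    }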