Spaces:
Sleeping
Sleeping
Add DeepSeek-V3
Browse files
- deepseek_client.py +98 -0
- llm_client.py +29 -1
- main.py +3 -2
- ui_components.py +2 -2
deepseek_client.py
ADDED
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import time
|
3 |
+
import requests
|
4 |
+
from typing import List, Dict, Optional
|
5 |
+
from config import Config
|
6 |
+
|
7 |
+
class DeepseekClient:
    """Thin HTTP client for the DeepSeek chat-completions API.

    Retries rate-limited (HTTP 429) requests with exponential backoff,
    validates the response shape, and surfaces failures as exceptions.
    """

    def __init__(self):
        # Fail fast if the key is missing so callers never hold a dead client.
        self.api_key = Config.get_deepseek_key()
        if not self.api_key:
            raise ValueError("Deepseek API key is not set")
        self.base_url = Config.DEEPSEEK_API_BASE
        self.max_retries = 3
        self.retry_delay = 2  # Initial delay in seconds (doubled per retry)

    def create(self, messages: List[Dict[str, str]], model: str = None, response_format: Optional[Dict] = None) -> str:
        """Send a chat-completion request and return the assistant's text.

        Args:
            messages: Chat history as a list of {"role", "content"} dicts.
            model: Model identifier; falls back to Config.AUTO_MODEL.
            response_format: Optional response_format payload passed through
                to the API (e.g. JSON mode).

        Returns:
            The content string of the first choice's message.

        Raises:
            ValueError: If the API key is unset.
            Exception: On API errors, malformed responses, or when the
                rate-limit retry budget is exhausted.
        """
        if not self.api_key:
            raise ValueError("Deepseek API key is not set")

        retries = 0
        last_error = None

        while retries < self.max_retries:
            try:
                headers = {
                    "Authorization": f"Bearer {self.api_key}",
                    "HTTP-Referer": "https://replit.com",
                    "X-Title": "MyChatMe",
                    "Content-Type": "application/json"
                }

                data = {
                    "model": model or Config.AUTO_MODEL,
                    "messages": messages,
                    "temperature": 0.7,
                    "max_tokens": 1000
                }

                if response_format:
                    data["response_format"] = response_format

                response = requests.post(
                    f"{self.base_url}/chat/completions",
                    headers=headers,
                    json=data,
                    timeout=30  # Avoid hanging forever on a stalled connection
                )

                if response.status_code == 429:
                    error_data = response.json()
                    error_message = error_data.get('error', {}).get('message', 'Rate limit exceeded')
                    print(f"Rate limit error: {error_message}")

                    # Check if it's a provider-specific rate limit
                    if 'metadata' in error_data.get('error', {}):
                        provider = error_data['error']['metadata'].get('provider_name', 'Unknown')
                        raise Exception(f"Rate limit exceeded for provider: {provider}")

                    # BUG FIX: record the rate-limit failure so that, if all
                    # retries are consumed, the final error message reflects
                    # rate limiting instead of the generic
                    # "Maximum retries exceeded" (previously last_error was
                    # never set on this path, so the friendly rate-limit
                    # message below was unreachable for plain 429 exhaustion).
                    last_error = Exception(f"Rate limit exceeded: {error_message}")

                    wait_time = self.retry_delay * (2 ** retries)
                    print(f"Rate limit exceeded. Waiting {wait_time} seconds before retry...")
                    time.sleep(wait_time)
                    retries += 1
                    continue

                response.raise_for_status()
                response_data = response.json()

                if "error" in response_data:
                    raise ValueError(f"Deepseek API returned error: {response_data['error']}")

                if not response_data.get('choices'):
                    raise ValueError("No choices in Deepseek API response")

                if not response_data['choices'][0].get('message'):
                    raise ValueError("No message in Deepseek API response choice")

                return response_data['choices'][0]['message']['content']

            except requests.exceptions.RequestException as e:
                # Transient network errors are retried with backoff.
                print(f"Deepseek Request Error: {str(e)}")
                last_error = e
                retries += 1
                if retries < self.max_retries:
                    time.sleep(self.retry_delay * (2 ** retries))
            except ValueError as e:
                # Malformed/declined responses won't improve on retry.
                print(f"Deepseek Value Error: {str(e)}")
                last_error = e
                break
            except Exception as e:
                print(f"Deepseek Unexpected Error: {str(e)}")
                last_error = e
                break

        # If all retries failed or other error occurred
        error_msg = str(last_error) if last_error else "Maximum retries exceeded"
        if "Rate limit exceeded" in error_msg:
            # f-prefix removed: this literal has no placeholders.
            raise Exception("Deepseek rate limit exceeded. Please try again later or switch to a different model.")
        raise Exception(f"Deepseek API error: {error_msg}")
|
llm_client.py
CHANGED
@@ -3,11 +3,13 @@ from typing import List, Dict
|
|
3 |
import openai
|
4 |
from config import Config
|
5 |
from openrouter_client import OpenRouterClient
|
|
|
6 |
|
7 |
class LLMClient:
|
8 |
def __init__(self):
|
9 |
self.openai_client = None
|
10 |
self.openrouter_client = None
|
|
|
11 |
self.test_mode = False # For testing error scenarios
|
12 |
self.initialize_clients()
|
13 |
|
@@ -16,11 +18,14 @@ class LLMClient:
|
|
16 |
try:
|
17 |
openai_key = Config.get_openai_key()
|
18 |
openrouter_key = Config.get_openrouter_key()
|
|
|
19 |
|
20 |
if openai_key:
|
21 |
self.openai_client = openai.OpenAI(api_key=openai_key)
|
22 |
if openrouter_key:
|
23 |
self.openrouter_client = OpenRouterClient()
|
|
|
|
|
24 |
except Exception as e:
|
25 |
print(f"Error initializing API clients: {str(e)}")
|
26 |
|
@@ -65,6 +70,29 @@ class LLMClient:
|
|
65 |
except openai.APIError as e:
|
66 |
raise Exception(f"OpenAI API error: {str(e)}")
|
67 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
68 |
def chat_claude(self, messages: List[Dict[str, str]]) -> str:
|
69 |
"""Send chat completion request to Claude via OpenRouter"""
|
70 |
if not self.openrouter_client:
|
@@ -118,4 +146,4 @@ class LLMClient:
|
|
118 |
return response.choices[0].message.content
|
119 |
except Exception as e:
|
120 |
print(f"Failed to generate context summary: {str(e)}")
|
121 |
-
return "" # Return empty string if summarization fails
|
|
|
3 |
import openai
|
4 |
from config import Config
|
5 |
from openrouter_client import OpenRouterClient
|
6 |
+
from deepseek_client import DeepseekClient
|
7 |
|
8 |
class LLMClient:
|
9 |
def __init__(self):
    """Create the LLM client wrapper and eagerly set up provider clients."""
    # Provider clients start out as None; initialize_clients() replaces the
    # ones whose API keys are configured.
    for attr in ("openai_client", "openrouter_client", "deepseek_client"):
        setattr(self, attr, None)
    self.test_mode = False  # For testing error scenarios
    self.initialize_clients()
|
15 |
|
|
|
18 |
try:
|
19 |
openai_key = Config.get_openai_key()
|
20 |
openrouter_key = Config.get_openrouter_key()
|
21 |
+
deepseek_key = Config.get_deepseek_key()
|
22 |
|
23 |
if openai_key:
|
24 |
self.openai_client = openai.OpenAI(api_key=openai_key)
|
25 |
if openrouter_key:
|
26 |
self.openrouter_client = OpenRouterClient()
|
27 |
+
if deepseek_key:
|
28 |
+
self.deepseek_client = DeepseekClient()
|
29 |
except Exception as e:
|
30 |
print(f"Error initializing API clients: {str(e)}")
|
31 |
|
|
|
70 |
except openai.APIError as e:
|
71 |
raise Exception(f"OpenAI API error: {str(e)}")
|
72 |
|
73 |
+
def chat_deepseek(self, messages: List[Dict[str, str]]) -> str:
    """Send chat completion request to Deepseek API"""
    # Guard: without a configured client there is nothing to call.
    if not self.deepseek_client:
        raise ValueError("Deepseek client not initialized. Please check your API key.")

    # Test hook: when test_mode is on and the last message asks for a
    # simulated failure, raise the matching error instead of calling out.
    if self.test_mode and messages:
        last_content = messages[-1].get("content", "").lower()
        if "test_error" in last_content:
            if "api_key" in last_content:
                raise ValueError("Invalid API key")
            if "rate_limit" in last_content:
                raise Exception("Rate limit exceeded")
            if "network" in last_content:
                raise Exception("Network connection error")

    try:
        return self.deepseek_client.create(
            messages=messages,
            model=Config.DEEPSEEK_MODEL
        )
    except Exception as e:
        raise Exception(f"Deepseek API error: {str(e)}")
|
95 |
+
|
96 |
def chat_claude(self, messages: List[Dict[str, str]]) -> str:
|
97 |
"""Send chat completion request to Claude via OpenRouter"""
|
98 |
if not self.openrouter_client:
|
|
|
146 |
return response.choices[0].message.content
|
147 |
except Exception as e:
|
148 |
print(f"Failed to generate context summary: {str(e)}")
|
149 |
+
return "" # Return empty string if summarization fails
|
main.py
CHANGED
@@ -208,7 +208,8 @@ def main():
|
|
208 |
"OpenRouter-Auto": llm_client.chat_auto,
|
209 |
"OpenAI": llm_client.chat_openai,
|
210 |
"Claude-3.5": llm_client.chat_claude,
|
211 |
-
"Gemini-2.0": llm_client.chat_gemini
|
|
|
212 |
}
|
213 |
|
214 |
if model in model_map:
|
@@ -259,4 +260,4 @@ def main():
|
|
259 |
st.rerun()
|
260 |
|
261 |
if __name__ == "__main__":
|
262 |
-
main()
|
|
|
208 |
"OpenRouter-Auto": llm_client.chat_auto,
|
209 |
"OpenAI": llm_client.chat_openai,
|
210 |
"Claude-3.5": llm_client.chat_claude,
|
211 |
+
"Gemini-2.0": llm_client.chat_gemini,
|
212 |
+
"DeepSeek-V3": llm_client.chat_deepseek
|
213 |
}
|
214 |
|
215 |
if model in model_map:
|
|
|
260 |
st.rerun()
|
261 |
|
262 |
if __name__ == "__main__":
|
263 |
+
main()
|
ui_components.py
CHANGED
@@ -52,7 +52,7 @@ def render_sidebar(i18n, chat_manager):
|
|
52 |
# Model selection - デフォルトをOpenRouter-Autoに設定
|
53 |
model = st.selectbox(
|
54 |
i18n.get_text("model_selection"),
|
55 |
-
["OpenRouter-Auto", "OpenAI", "Claude-3.5", "Gemini-2.0"],
|
56 |
index=0,
|
57 |
key="model_selection"
|
58 |
)
|
@@ -87,4 +87,4 @@ def render_sidebar(i18n, chat_manager):
|
|
87 |
except Exception as e:
|
88 |
show_notification(f"{i18n.get_text('export_error')}: {str(e)}", "error")
|
89 |
|
90 |
-
return language, model
|
|
|
52 |
# Model selection - デフォルトをOpenRouter-Autoに設定
|
53 |
model = st.selectbox(
|
54 |
i18n.get_text("model_selection"),
|
55 |
+
["OpenRouter-Auto", "OpenAI", "Claude-3.5", "Gemini-2.0", "DeepSeek-V3"],
|
56 |
index=0,
|
57 |
key="model_selection"
|
58 |
)
|
|
|
87 |
except Exception as e:
|
88 |
show_notification(f"{i18n.get_text('export_error')}: {str(e)}", "error")
|
89 |
|
90 |
+
return language, model
|