Spaces:
Sleeping
Sleeping
Change DeepSeek Client
Browse files- base_client.py +54 -0
- config.py +5 -2
- deepseek_client.py +24 -82
- main.py +10 -1
- task_history.txt +26 -0
base_client.py
ADDED
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from openai import OpenAI
|
2 |
+
import requests
|
3 |
+
from typing import List, Dict, Optional
|
4 |
+
|
5 |
+
class BaseClient(OpenAI):
    """Thin wrapper around the OpenAI SDK client that also knows how to talk
    to the DeepSeek chat-completions endpoint over plain HTTP.

    Subclasses (e.g. a DeepSeek-specific client) reuse ``deepseek_chat`` so
    the request/response handling lives in one place.
    """

    def __init__(self, api_key, **kwargs):
        # Delegate all client configuration (base_url, timeout, ...) to the SDK.
        super().__init__(api_key=api_key, **kwargs)

    def openai_chat(self, messages, model="gpt-3.5-turbo", **kwargs):
        """Send *messages* to the OpenAI chat-completions API.

        Returns the assistant message content, or ``None`` on any error
        (errors are printed, not raised — callers must handle ``None``).
        """
        try:
            response = self.chat.completions.create(
                model=model,
                messages=messages,
                **kwargs
            )
            return response.choices[0].message.content
        except Exception as e:
            print(f"OpenAI Chat API Error: {e}")
            return None

    def deepseek_chat(self, messages: List[Dict[str, str]], model: str = "deepseek-chat", **kwargs) -> Optional[str]:
        """Send *messages* to the DeepSeek chat-completions API via ``requests``.

        Args:
            messages: chat history as ``[{"role": ..., "content": ...}, ...]``.
            model: DeepSeek model name.
            **kwargs: optional ``temperature``, ``max_tokens`` and ``timeout``.

        Returns:
            The assistant message content, or ``None`` on any error
            (errors are printed, not raised — callers must handle ``None``).
        """
        try:
            headers = {
                "Content-Type": "application/json",
                "Authorization": f"Bearer {self.api_key}"
            }

            data = {
                "model": model,
                "messages": messages,
                "temperature": kwargs.get("temperature", 0.7),
                "max_tokens": kwargs.get("max_tokens", 1000),
                "stream": False
            }

            # A timeout is mandatory: without one a stalled connection would
            # block the caller forever. 60s default, overridable per call.
            response = requests.post(
                "https://api.deepseek.com/v1/chat/completions",
                headers=headers,
                json=data,
                timeout=kwargs.get("timeout", 60)
            )

            if response.status_code != 200:
                raise ValueError(f"Deepseek API request failed with status {response.status_code}")

            response_data = response.json()

            if not response_data.get("choices"):
                raise ValueError("No choices in Deepseek API response")

            return response_data["choices"][0]["message"]["content"]

        except Exception as e:
            print(f"Deepseek Chat API Error: {e}")
            return None
config.py
CHANGED
@@ -5,6 +5,9 @@ import pytz
|
|
5 |
|
6 |
@dataclass
|
7 |
class Config:
|
|
|
|
|
|
|
8 |
# the newest OpenAI model is "gpt-4o" which was released May 13, 2024.
|
9 |
OPENAI_MODEL = "gpt-4o-mini-2024-07-18" # Changed from "gpt-4o-mini-2024-07-18" to "gpt-4o" "o1-mini-2024-09-12"
|
10 |
GEMINI_MODEL = "google/gemini-2.0-flash-exp:free"
|
@@ -31,8 +34,8 @@ class Config:
|
|
31 |
]
|
32 |
|
33 |
# DeepSeek model configuration
|
34 |
-
DEEPSEEK_MODEL = "deepseek-chat
|
35 |
-
DEEPSEEK_API_BASE = "https://api.deepseek.com"
|
36 |
|
37 |
# Chat context settings
|
38 |
MAX_HISTORY_CHATS = 10
|
|
|
5 |
|
6 |
@dataclass
|
7 |
class Config:
|
8 |
+
# Version
|
9 |
+
VERSION = "1.0.0"
|
10 |
+
|
11 |
# the newest OpenAI model is "gpt-4o" which was released May 13, 2024.
|
12 |
OPENAI_MODEL = "gpt-4o-mini-2024-07-18" # Changed from "gpt-4o-mini-2024-07-18" to "gpt-4o" "o1-mini-2024-09-12"
|
13 |
GEMINI_MODEL = "google/gemini-2.0-flash-exp:free"
|
|
|
34 |
]
|
35 |
|
36 |
# DeepSeek model configuration
|
37 |
+
DEEPSEEK_MODEL = "deepseek-chat"
|
38 |
+
DEEPSEEK_API_BASE = "https://api.deepseek.com/v1/chat/completions"
|
39 |
|
40 |
# Chat context settings
|
41 |
MAX_HISTORY_CHATS = 10
|
deepseek_client.py
CHANGED
@@ -1,86 +1,28 @@
|
|
1 |
import os
|
2 |
-
import
|
3 |
-
import requests
|
4 |
-
from typing import List, Dict, Optional
|
5 |
from config import Config
|
6 |
-
from
|
7 |
|
8 |
-
class DeepseekClient(
|
9 |
def __init__(self, api_key=None, **kwargs):
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
print(f"
|
19 |
-
print(f"
|
20 |
-
print(f"
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
print(f" First message: {messages[0]['content'][:50]}...")
|
32 |
-
|
33 |
-
retries = 0
|
34 |
-
last_error = None
|
35 |
-
|
36 |
-
while retries < self.max_retries:
|
37 |
-
try:
|
38 |
-
print(f"[DeepseekClient] Attempting API request (retry {retries + 1}/{self.max_retries})")
|
39 |
-
response = self.chat.completions.create(
|
40 |
-
model=model or Config.DEEPSEEK_MODEL,
|
41 |
-
messages=messages,
|
42 |
-
temperature=0.7,
|
43 |
-
max_tokens=1000,
|
44 |
-
response_format=response_format,
|
45 |
-
**kwargs
|
46 |
-
)
|
47 |
-
|
48 |
-
response_data = response.model_dump()
|
49 |
-
|
50 |
-
if "error" in response_data:
|
51 |
-
raise ValueError(f"Deepseek API returned error: {response_data['error']}")
|
52 |
-
|
53 |
-
if not response_data.get('choices'):
|
54 |
-
raise ValueError("No choices in Deepseek API response")
|
55 |
-
|
56 |
-
if not response_data['choices'][0].get('message'):
|
57 |
-
raise ValueError("No message in Deepseek API response choice")
|
58 |
-
|
59 |
-
content = response_data['choices'][0]['message']['content']
|
60 |
-
print("[DeepseekClient] API request successful")
|
61 |
-
print(f"[DeepseekClient] Received response content (length: {len(content)})")
|
62 |
-
return content
|
63 |
-
|
64 |
-
except requests.exceptions.RequestException as e:
|
65 |
-
print(f"[DeepseekClient] Request failed: {str(e)}")
|
66 |
-
print(f"Deepseek Request Error: {str(e)}")
|
67 |
-
last_error = e
|
68 |
-
retries += 1
|
69 |
-
if retries < self.max_retries:
|
70 |
-
time.sleep(self.retry_delay * (2 ** retries))
|
71 |
-
except ValueError as e:
|
72 |
-
print(f"[DeepseekClient] Value error occurred: {str(e)}")
|
73 |
-
print(f"Deepseek Value Error: {str(e)}")
|
74 |
-
last_error = e
|
75 |
-
break
|
76 |
-
except Exception as e:
|
77 |
-
print(f"[DeepseekClient] Unexpected error occurred: {str(e)}")
|
78 |
-
print(f"Deepseek Unexpected Error: {str(e)}")
|
79 |
-
last_error = e
|
80 |
-
break
|
81 |
-
|
82 |
-
# If all retries failed or other error occurred
|
83 |
-
error_msg = str(last_error) if last_error else "Maximum retries exceeded"
|
84 |
-
if "Rate limit exceeded" in error_msg:
|
85 |
-
raise Exception(f"Deepseek rate limit exceeded. Please try again later or switch to a different model.")
|
86 |
-
raise Exception(f"Deepseek API error: {error_msg}")
|
|
|
1 |
import os
|
2 |
+
from typing import List, Dict
|
|
|
|
|
3 |
from config import Config
|
4 |
+
from base_client import BaseClient
|
5 |
|
6 |
+
class DeepseekClient(BaseClient):
    """DeepSeek chat client built on top of :class:`BaseClient`.

    All HTTP handling is delegated to ``BaseClient.deepseek_chat``; this class
    only wires in the API key from :class:`Config` and adds debug logging.
    """

    def __init__(self, api_key=None, **kwargs):
        print(f"[DEBUG] Starting DeepseekClient")
        # Fall back to the configured key when none is passed explicitly.
        super().__init__(
            api_key=api_key or Config.get_deepseek_key(),
            **kwargs
        )
        print(f"[DEBUG] API key configured")

    def create(self, messages: List[Dict[str, str]], model: str = None, **kwargs) -> str:
        """Run a chat completion and return the assistant's reply text.

        Args:
            messages: chat history as ``[{"role": ..., "content": ...}, ...]``.
            model: DeepSeek model name; defaults to the base client's default
                when ``None``.
            **kwargs: forwarded to ``deepseek_chat`` (temperature, max_tokens, ...).

        Raises:
            ValueError: when the API produced no content. Previously this path
                fell through and implicitly returned ``None`` despite the
                ``-> str`` annotation, which silently broke callers;
                ``deepseek_chat`` swallows its own errors and returns ``None``,
                so the failure must be surfaced here.
        """
        print(f"[DEBUG] Starting DeepseekClient::create")
        print(f"[DEBUG] Model: {model or 'deepseek-chat'}")
        print(f"[DEBUG] Messages: {messages}")

        try:
            result = self.deepseek_chat(messages=messages, model=model, **kwargs)
            if result:
                print(f"[DEBUG] API request successful")
                print(f"[DEBUG] Response: {result[:100]}...")  # Show first 100 chars of response
                return result
            # deepseek_chat already printed the underlying error; raise so the
            # declared str return type is honoured instead of returning None.
            raise ValueError("Deepseek API returned no content")
        except Exception as e:
            print(f"[DEBUG] Error in create: {str(e)}")
            raise
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
main.py
CHANGED
@@ -129,6 +129,12 @@ def render_template_manager(i18n):
|
|
129 |
show_notification(f"{i18n.get_text('template_error')}: {str(e)}", "error")
|
130 |
|
131 |
def main():
|
|
|
|
|
|
|
|
|
|
|
|
|
132 |
i18n = st.session_state.i18n
|
133 |
chat_manager = st.session_state.chat_manager
|
134 |
llm_client = st.session_state.llm_client
|
@@ -209,7 +215,7 @@ def main():
|
|
209 |
"OpenAI": llm_client.chat_openai,
|
210 |
"Claude-3.5": llm_client.chat_claude,
|
211 |
"Gemini-2.0": llm_client.chat_gemini,
|
212 |
-
"
|
213 |
}
|
214 |
|
215 |
if model in model_map:
|
@@ -260,4 +266,7 @@ def main():
|
|
260 |
st.rerun()
|
261 |
|
262 |
if __name__ == "__main__":
|
|
|
|
|
|
|
263 |
main()
|
|
|
129 |
show_notification(f"{i18n.get_text('template_error')}: {str(e)}", "error")
|
130 |
|
131 |
def main():
|
132 |
+
# アプリケーション起動情報
|
133 |
+
print("\n[MyChatMe] アプリケーションを起動中...")
|
134 |
+
print(f" Version: {Config.VERSION}")
|
135 |
+
print(f" 言語: {st.session_state.i18n._current_language}")
|
136 |
+
print(" コンポーネントを初期化中...")
|
137 |
+
|
138 |
i18n = st.session_state.i18n
|
139 |
chat_manager = st.session_state.chat_manager
|
140 |
llm_client = st.session_state.llm_client
|
|
|
215 |
"OpenAI": llm_client.chat_openai,
|
216 |
"Claude-3.5": llm_client.chat_claude,
|
217 |
"Gemini-2.0": llm_client.chat_gemini,
|
218 |
+
"deepseek-chat": llm_client.chat_deepseek
|
219 |
}
|
220 |
|
221 |
if model in model_map:
|
|
|
266 |
st.rerun()
|
267 |
|
268 |
if __name__ == "__main__":
|
269 |
+
print("========================================")
|
270 |
+
print("MyChatMe - 多言語AIチャットアプリケーション")
|
271 |
+
print("========================================")
|
272 |
main()
|
task_history.txt
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
タスク履歴:
|
2 |
+
|
3 |
+
1. deepseek_client.pyの変更:
|
4 |
+
- OpenAIクラスのインポート
|
5 |
+
- DeepseekClientがOpenAIクラスを継承
|
6 |
+
- 親クラスの初期化呼び出し
|
7 |
+
- createメソッドのオーバーライド
|
8 |
+
|
9 |
+
2. reportlab関連のエラー対応:
|
10 |
+
- reportlabのインストール提案 (ユーザー拒否)
|
11 |
+
- PDFエクスポート機能削除提案 (ユーザー拒否)
|
12 |
+
- 最終的にユーザーの指示でタスク停止
|
13 |
+
|
14 |
+
3. タスク履歴保存:
|
15 |
+
- 本ファイルを作成し、タスクの経緯を記録
|
16 |
+
|
17 |
+
4. Deepseekチャット機能の改善:
|
18 |
+
- base_client.pyにdeepseek_chatメソッドを追加
|
19 |
+
- deepseek_client.pyの実装を親クラスのメソッドを使用するように変更
|
20 |
+
- エラーハンドリングの改善
|
21 |
+
- デバッグログの整理
|
22 |
+
|
23 |
+
5. メソッド名の変更:
|
24 |
+
- base_client.pyのchatメソッドをopenai_chatに変更
|
25 |
+
- 影響範囲を確認し、安全に名前変更を実施
|
26 |
+
- OpenAIとDeepseekのチャットメソッドの区別を明確化
|