koura718 committed
Commit 2cf6258 · Parent: 46e66e9

Change baseclient to custom_openai_client

Files changed (5)
  1. base_client.py +0 -54
  2. custom_openai_client.py +220 -0
  3. deepseek_client.py +17 -9
  4. roots.sst +0 -0
  5. test_deepseek.py +27 -0
base_client.py DELETED
@@ -1,54 +0,0 @@
-from openai import OpenAI
-import requests
-from typing import List, Dict, Optional
-
-class BaseClient(OpenAI):
-    def __init__(self, api_key, **kwargs):
-        super().__init__(api_key=api_key, **kwargs)
-
-    def openai_chat(self, messages, model="gpt-3.5-turbo", **kwargs):
-        try:
-            response = self.chat.completions.create(
-                model=model,
-                messages=messages,
-                **kwargs
-            )
-            return response.choices[0].message.content
-        except Exception as e:
-            print(f"OpenAI Chat API Error: {e}")
-            return None
-
-    def deepseek_chat(self, messages: List[Dict[str, str]], model: str = "deepseek-chat", **kwargs) -> Optional[str]:
-        try:
-            headers = {
-                "Content-Type": "application/json",
-                "Authorization": f"Bearer {self.api_key}"
-            }
-
-            data = {
-                "model": model,
-                "messages": messages,
-                "temperature": kwargs.get("temperature", 0.7),
-                "max_tokens": kwargs.get("max_tokens", 1000),
-                "stream": False
-            }
-
-            response = requests.post(
-                "https://api.deepseek.com/v1/chat/completions",
-                headers=headers,
-                json=data
-            )
-
-            if response.status_code != 200:
-                raise ValueError(f"Deepseek API request failed with status {response.status_code}")
-
-            response_data = response.json()
-
-            if not response_data.get("choices"):
-                raise ValueError("No choices in Deepseek API response")
-
-            return response_data["choices"][0]["message"]["content"]
-
-        except Exception as e:
-            print(f"Deepseek Chat API Error: {e}")
-            return None
custom_openai_client.py ADDED
@@ -0,0 +1,220 @@
+from typing import List, Dict, Optional, Union
+from openai import OpenAI
+from openai.types.chat import ChatCompletion
+from openai._types import NotGiven, NOT_GIVEN
+import openai
+import requests
+import urllib3
+from dataclasses import dataclass
+from enum import Enum
+
+class ModelProvider(Enum):
+    OPENAI = "openai"
+    DEEPSEEK = "deepseek"
+
+@dataclass
+class ApiConfig:
+    base_url: str
+    api_version: str = "v1"
+
+    @property
+    def chat_endpoint(self) -> str:
+        return f"{self.base_url}/{self.api_version}/chat/completions"
+
+class CustomOpenAI(OpenAI):
+    # API configuration for the supported providers
+    API_CONFIGS = {
+        ModelProvider.OPENAI: ApiConfig("https://api.openai.com"),
+        ModelProvider.DEEPSEEK: ApiConfig("https://api.deepseek.com")
+    }
+
+    def __init__(
+        self,
+        api_key: Optional[str] = None,
+        organization: Optional[str] = None,
+        deepseek_api_key: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        Initialize the CustomOpenAI client with enhanced chat functionality.
+
+        Args:
+            api_key: OpenAI API key
+            organization: Organization ID (optional)
+            deepseek_api_key: DeepSeek API key (optional)
+            **kwargs: Additional client configuration parameters
+        """
+        super().__init__(
+            api_key=api_key,
+            organization=organization,
+            **kwargs
+        )
+        self.deepseek_api_key = deepseek_api_key
+
+    def simple_chat(
+        self,
+        messages: List[Dict[str, str]],
+        model: str = "gpt-3.5-turbo",
+        temperature: float = 0.7,
+        max_tokens: Optional[int] = None,
+        **kwargs
+    ) -> Optional[str]:
+        """
+        Simplified chat completion method that returns just the message content.
+
+        Args:
+            messages: List of message dictionaries with 'role' and 'content'
+            model: Model identifier to use
+            temperature: Sampling temperature (0-2)
+            max_tokens: Maximum number of tokens to generate
+            **kwargs: Additional parameters to pass to the API
+
+        Returns:
+            Generated message content, or None if an error occurs
+        """
+        try:
+            # Prepare request parameters
+            params = {
+                "model": model,
+                "messages": messages,
+                "temperature": temperature,
+            }
+
+            # Add max_tokens only if specified
+            if max_tokens is not None:
+                params["max_tokens"] = max_tokens
+
+            # Add any additional kwargs
+            params.update(kwargs)
+
+            # Make the API call through the inherited chat.completions interface
+            response: ChatCompletion = self.chat.completions.create(**params)
+
+            # Extract the message content from the first choice
+            if response.choices and len(response.choices) > 0:
+                return response.choices[0].message.content
+
+            return None
+
+        except openai.APIError as e:
+            print(f"OpenAI API Error: {str(e)}")
+            return None
+        except Exception as e:
+            print(f"Unexpected error: {str(e)}")
+            return None
+
+    def deepseek_chat(
+        self,
+        messages: List[Dict[str, str]],
+        model: str = "deepseek-chat",
+        temperature: float = 0.7,
+        max_tokens: Optional[int] = None,
+        **kwargs
+    ) -> Optional[str]:
+        """
+        Chat completion method for DeepSeek models.
+
+        Args:
+            messages: List of message dictionaries with 'role' and 'content'
+            model: DeepSeek model identifier
+            temperature: Sampling temperature (0-2)
+            max_tokens: Maximum number of tokens to generate
+            **kwargs: Additional parameters to pass to the API
+
+        Returns:
+            Generated message content, or None if an error occurs
+        """
+        if not self.deepseek_api_key:
+            raise ValueError("DeepSeek API key is required for deepseek_chat")
+
+        try:
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {self.deepseek_api_key}",
+                "User-Agent": "DeepseekClient/1.0"
+            }
+
+            data = {
+                "model": model,
+                "messages": messages,
+                "temperature": temperature,
+                "stream": False
+            }
+
+            if max_tokens is not None:
+                data["max_tokens"] = max_tokens
+
+            # Add any additional kwargs to the request data
+            data.update(kwargs)
+
+            # Suppress the warning emitted because SSL verification is disabled below
+            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+            config = self.API_CONFIGS[ModelProvider.DEEPSEEK]
+            response = requests.post(
+                config.chat_endpoint,
+                headers=headers,
+                json=data,
+                verify=False,     # Disable SSL verification (development only)
+                timeout=(10, 60)  # 10 s connect timeout, 60 s read timeout
+            )
+
+            if response.status_code != 200:
+                error_msg = f"DeepSeek API request failed with status {response.status_code}"
+                try:
+                    error_data = response.json()
+                    print(f"[DEBUG] Error response: {error_data}")
+                    if "error" in error_data:
+                        error_msg += f": {error_data['error']}"
+                except Exception as e:
+                    print(f"[DEBUG] Failed to parse error response: {str(e)}")
+                raise ValueError(error_msg)
+
+            response_data = response.json()
+
+            if not response_data.get("choices"):
+                raise ValueError("No choices in DeepSeek API response")
+
+            return response_data["choices"][0]["message"]["content"]
+
+        except requests.exceptions.RequestException as e:
+            print(f"Network error during DeepSeek API call: {str(e)}")
+            return None
+        except ValueError as e:
+            print(f"DeepSeek API Error: {str(e)}")
+            return None
+        except Exception as e:
+            print(f"Unexpected error in DeepSeek chat: {str(e)}")
+            return None
+
+    def chat_with_retry(
+        self,
+        messages: List[Dict[str, str]],
+        provider: ModelProvider = ModelProvider.OPENAI,
+        max_retries: int = 3,
+        **kwargs
+    ) -> Optional[str]:
+        """
+        Chat completion with automatic retry on failure.
+
+        Args:
+            messages: List of message dictionaries
+            provider: Model provider (OPENAI or DEEPSEEK)
+            max_retries: Maximum number of retry attempts
+            **kwargs: Additional parameters for the chat methods
+
+        Returns:
+            Generated message content, or None if all retries fail
+        """
+        chat_method = self.simple_chat if provider == ModelProvider.OPENAI else self.deepseek_chat
+
+        for attempt in range(max_retries):
+            try:
+                result = chat_method(messages, **kwargs)
+                if result is not None:
+                    return result
+            except Exception as e:
+                if attempt == max_retries - 1:
+                    print(f"Failed after {max_retries} attempts: {str(e)}")
+                    return None
+                print(f"Attempt {attempt + 1} failed, retrying...")
+        return None
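
For reference, a minimal usage sketch of the new client follows. It only calls methods defined in this file; the environment variable names and model identifiers are illustrative assumptions, not part of the commit.

import os
from custom_openai_client import CustomOpenAI, ModelProvider

# Hypothetical: keys read from environment variables for this sketch only
client = CustomOpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
    deepseek_api_key=os.getenv("DEEPSEEK_API_KEY"),
)

messages = [{"role": "user", "content": "Say hello in one sentence."}]

# OpenAI path: goes through the inherited chat.completions interface
print(client.simple_chat(messages, model="gpt-3.5-turbo"))

# DeepSeek path: goes through the raw requests.post helper
print(client.deepseek_chat(messages, model="deepseek-chat"))

# Retry wrapper: retries up to max_retries times, returns None if every attempt fails
print(client.chat_with_retry(messages, provider=ModelProvider.DEEPSEEK, max_retries=3))
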
deepseek_client.py CHANGED
@@ -1,28 +1,36 @@
 import os
 from typing import List, Dict
 from config import Config
-from base_client import BaseClient
+from custom_openai_client import CustomOpenAI
 
-class DeepseekClient(BaseClient):
+class DeepseekClient(CustomOpenAI):
     def __init__(self, api_key=None, **kwargs):
         print(f"[DEBUG] Starting DeepseekClient")
-        super().__init__(
-            api_key=api_key or Config.get_deepseek_key(),
-            **kwargs
-        )
-        print(f"[DEBUG] API key configured")
+        try:
+            api_key = api_key or Config.get_deepseek_key()
+            print(f"[DEBUG] Using API key: {api_key[:4]}...")  # show only the first 4 characters
+            super().__init__(
+                deepseek_api_key=api_key,
+                **kwargs
+            )
+            print(f"[DEBUG] API client initialized successfully")
+        except Exception as e:
+            print(f"[DEBUG] Initialization error: {str(e)}")
+            raise
 
     def create(self, messages: List[Dict[str, str]], model: str = None, **kwargs) -> str:
         print(f"[DEBUG] Starting DeepseekClient::create")
         print(f"[DEBUG] Model: {model or 'deepseek-chat'}")
-        print(f"[DEBUG] Messages: {messages}")
+        print(f"[DEBUG] Messages: {[{k: v for k, v in m.items()} for m in messages]}")
 
         try:
             result = self.deepseek_chat(messages=messages, model=model, **kwargs)
             if result:
                 print(f"[DEBUG] API request successful")
                 print(f"[DEBUG] Response: {result[:100]}...")  # Show first 100 chars of response
-            return result
+                return result
+            else:
+                raise Exception("No response from DeepSeek API")
         except Exception as e:
             print(f"[DEBUG] Error in create: {str(e)}")
             raise
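
With this change, create() no longer returns None silently: when the DeepSeek call yields no content it raises, so callers should wrap it in try/except. A minimal sketch of the new calling convention (the model name here is only an example):

from deepseek_client import DeepseekClient

# The API key is resolved via Config.get_deepseek_key() when not passed explicitly
client = DeepseekClient()

try:
    reply = client.create(
        messages=[{"role": "user", "content": "Summarize this commit in one line."}],
        model="deepseek-chat",
    )
    print(reply)
except Exception as err:
    # create() re-raises failures, including the new "No response from DeepSeek API" case
    print(f"DeepSeek call failed: {err}")
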
roots.sst ADDED
Binary file (765 kB).
 
test_deepseek.py ADDED
@@ -0,0 +1,27 @@
+from deepseek_client import DeepseekClient
+
+def test_deepseek():
+    try:
+        print("Creating DeepseekClient...")
+        client = DeepseekClient()
+
+        messages = [
+            {
+                "role": "user",
+                "content": "こんにちは。あなたのモデルは何でしょうか?"
+            }
+        ]
+        print("\nSending test message...")
+        response = client.create(
+            messages=messages,
+            model="deepseek-chat-67b-alpha"
+        )
+
+        print("\nResponse received:")
+        print(response)
+
+    except Exception as e:
+        print(f"\nError occurred: {str(e)}")
+
+if __name__ == "__main__":
+    test_deepseek()