koura718 committed
Commit d0d1766 · 1 Parent(s): 8378e98

Change custom_openai_client and deepseek_client

Files changed (2):
  1. custom_openai_client.py  +18 -15
  2. deepseek_client.py  +89 -2
custom_openai_client.py CHANGED

@@ -102,20 +102,20 @@ class CustomOpenAI(OpenAI):
             print(f"Unexpected error: {str(e)}")
             return None
 
-    def deepseek_chat(
+    def openai_chat(
         self,
         messages: List[Dict[str, str]],
-        model: str = "deepseek-chat",
+        model: str = "gpt-4-mini",
         temperature: float = 0.7,
         max_tokens: Optional[int] = None,
         **kwargs
     ) -> Optional[str]:
         """
-        Chat completion method for DeepSeek models.
+        Chat completion method for OpenAI models.
 
         Args:
             messages: List of message dictionaries with 'role' and 'content'
-            model: DeepSeek model identifier
+            model: OpenAI model identifier
             temperature: Sampling temperature (0-2)
             max_tokens: Maximum number of tokens to generate
             **kwargs: Additional parameters to pass to the API
@@ -123,14 +123,14 @@ class CustomOpenAI(OpenAI):
         Returns:
             Generated message content or None if an error occurs
         """
-        if not self.deepseek_api_key:
-            raise ValueError("DeepSeek API key is required for deepseek_chat")
+        if not self.api_key:
+            raise ValueError("OpenAI API key is required for openai_chat")
 
         try:
             headers = {
                 "Content-Type": "application/json",
-                "Authorization": f"Bearer {self.deepseek_api_key}",
-                "User-Agent": "DeepseekClient/1.0"
+                "Authorization": f"Bearer {self.api_key}",
+                "OpenAI-Organization": self.organization if self.organization else ""
             }
 
             data = {
@@ -146,7 +146,7 @@ class CustomOpenAI(OpenAI):
             # Add any additional kwargs to the request data
             data.update(kwargs)
 
-            config = self.API_CONFIGS[ModelProvider.DEEPSEEK]
+            config = self.API_CONFIGS[ModelProvider.OPENAI]
             response = requests.post(
                 config.chat_endpoint,
                 headers=headers,
@@ -159,7 +159,7 @@ class CustomOpenAI(OpenAI):
             urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
             if response.status_code != 200:
-                error_msg = f"DeepSeek API request failed with status {response.status_code}"
+                error_msg = f"OpenAI API request failed with status {response.status_code}"
                 try:
                     error_data = response.json()
                     print(f"[DEBUG] Error response: {error_data}")
@@ -172,18 +172,18 @@ class CustomOpenAI(OpenAI):
             response_data = response.json()
 
             if not response_data.get("choices"):
-                raise ValueError("No choices in DeepSeek API response")
+                raise ValueError("No choices in OpenAI API response")
 
             return response_data["choices"][0]["message"]["content"]
 
         except requests.exceptions.RequestException as e:
-            print(f"Network error during DeepSeek API call: {str(e)}")
+            print(f"Network error during OpenAI API call: {str(e)}")
             return None
         except ValueError as e:
-            print(f"DeepSeek API Error: {str(e)}")
+            print(f"OpenAI API Error: {str(e)}")
             return None
         except Exception as e:
-            print(f"Unexpected error in DeepSeek chat: {str(e)}")
+            print(f"Unexpected error in OpenAI chat: {str(e)}")
             return None
 
     def chat_with_retry(
@@ -205,7 +205,10 @@ class CustomOpenAI(OpenAI):
         Returns:
             Generated message content or None if all retries fail
         """
-        chat_method = self.simple_chat if provider == ModelProvider.OPENAI else self.deepseek_chat
+        chat_method = {
+            ModelProvider.OPENAI: self.openai_chat,
+            ModelProvider.DEEPSEEK: self.deepseek_chat
+        }.get(provider, self.simple_chat)
 
         for attempt in range(max_retries):
             try:
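
A minimal usage sketch of the new provider dispatch in chat_with_retry (not part of the commit; the constructor keywords and any chat_with_retry parameters other than provider and max_retries are assumptions based on the code visible in this diff):

    from custom_openai_client import CustomOpenAI, ModelProvider

    # Hypothetical construction; the real keyword names may differ.
    client = CustomOpenAI(api_key="<openai-key>", deepseek_api_key="<deepseek-key>")
    messages = [{"role": "user", "content": "Hello"}]

    # ModelProvider.OPENAI now routes to openai_chat, ModelProvider.DEEPSEEK to
    # deepseek_chat, and any other provider falls back to simple_chat.
    reply = client.chat_with_retry(messages, provider=ModelProvider.DEEPSEEK, max_retries=3)
    print(reply)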
deepseek_client.py CHANGED

@@ -1,7 +1,11 @@
 import os
-from typing import List, Dict
+from typing import List, Dict, Optional
 from config import Config
-from custom_openai_client import CustomOpenAI
+from custom_openai_client import CustomOpenAI, ModelProvider
+import requests
+from dataclasses import dataclass
+from enum import Enum
+import urllib3
 
 class DeepseekClient(CustomOpenAI):
     def __init__(self, api_key=None, **kwargs):
@@ -18,6 +22,89 @@ class DeepseekClient(CustomOpenAI):
             print(f"[DEBUG] Initialization error: {str(e)}")
             raise
 
+    def deepseek_chat(
+        self,
+        messages: List[Dict[str, str]],
+        model: str = "deepseek-chat",
+        temperature: float = 0.7,
+        max_tokens: Optional[int] = None,
+        **kwargs
+    ) -> Optional[str]:
+        """
+        Chat completion method for DeepSeek models.
+
+        Args:
+            messages: List of message dictionaries with 'role' and 'content'
+            model: DeepSeek model identifier
+            temperature: Sampling temperature (0-2)
+            max_tokens: Maximum number of tokens to generate
+            **kwargs: Additional parameters to pass to the API
+
+        Returns:
+            Generated message content or None if an error occurs
+        """
+        if not self.deepseek_api_key:
+            raise ValueError("DeepSeek API key is required for deepseek_chat")
+
+        try:
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {self.deepseek_api_key}",
+                "User-Agent": "DeepseekClient/1.0"
+            }
+
+            data = {
+                "model": model,
+                "messages": messages,
+                "temperature": temperature,
+                "stream": False
+            }
+
+            if max_tokens is not None:
+                data["max_tokens"] = max_tokens
+
+            # Add any additional kwargs to the request data
+            data.update(kwargs)
+
+            config = self.API_CONFIGS[ModelProvider.DEEPSEEK]
+            response = requests.post(
+                config.chat_endpoint,
+                headers=headers,
+                json=data,
+                verify=False,  # Disable SSL verification (development environments only)
+                timeout=(10, 60)  # 10-second connect timeout, 60-second read timeout
+            )
+            # Suppress the warning emitted because SSL verification is disabled
+            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+            if response.status_code != 200:
+                error_msg = f"DeepSeek API request failed with status {response.status_code}"
+                try:
+                    error_data = response.json()
+                    print(f"[DEBUG] Error response: {error_data}")
+                    if "error" in error_data:
+                        error_msg += f": {error_data['error']}"
+                except Exception as e:
+                    print(f"[DEBUG] Failed to parse error response: {str(e)}")
+                raise ValueError(error_msg)
+
+            response_data = response.json()
+
+            if not response_data.get("choices"):
+                raise ValueError("No choices in DeepSeek API response")
+
+            return response_data["choices"][0]["message"]["content"]
+
+        except requests.exceptions.RequestException as e:
+            print(f"Network error during DeepSeek API call: {str(e)}")
+            return None
+        except ValueError as e:
+            print(f"DeepSeek API Error: {str(e)}")
+            return None
+        except Exception as e:
+            print(f"Unexpected error in DeepSeek chat: {str(e)}")
+            return None
+
     def create(self, messages: List[Dict[str, str]], model: str = None, **kwargs) -> str:
         print(f"[DEBUG] Starting DeepseekClient::create")
         print(f"[DEBUG] Model: {model or 'deepseek-chat'}")