Niansuh committed
Commit 81ddbf3 · verified · 1 parent: 4b6abf6

Create blackboxai.py

Files changed (1):
  1. api/providers/blackboxai.py +215 -0
api/providers/blackboxai.py ADDED
@@ -0,0 +1,215 @@
# api/providers/blackboxai.py

from __future__ import annotations

import json
import uuid
from datetime import datetime
from typing import Any, Dict, Optional

import httpx
from fastapi import HTTPException  # raised by the error handlers in create_async_generator

from api.config import (
    MODEL_MAPPING,
    headers,
    BASE_URL,
    MODEL_PREFIXES,
    MODEL_REFERERS,
)
from api.models import ChatRequest
from api.logger import setup_logger
from api.image import ImageResponse  # Assuming similar structure to GizAI
from api.typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin

logger = setup_logger(__name__)

class BlackBoxAI(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://www.blackbox.ai"
    api_endpoint = "https://www.blackbox.ai/api/chat"
    working = True

    supports_system_message = True
    supports_message_history = True

    # Define BlackBoxAI models
    default_model = 'blackboxai'
    chat_models = [
        'blackboxai',
        'blackboxai-pro',
        'flux',
        'llama-3.1-8b',
        'llama-3.1-70b',
        'llama-3.1-405b',
        'gpt-4o',
        'gemini-pro',
        'gemini-1.5-flash',
        'claude-sonnet-3.5',
        'PythonAgent',
        'JavaAgent',
        'JavaScriptAgent',
        'HTMLAgent',
        'GoogleCloudAgent',
        'AndroidDeveloper',
        'SwiftDeveloper',
        'Next.jsAgent',
        'MongoDBAgent',
        'PyTorchAgent',
        'ReactAgent',
        'XcodeAgent',
        'AngularJSAgent',
        'RepoMap',
        'gemini-1.5-pro-latest',
        'gemini-1.5-pro',
        'claude-3-5-sonnet-20240620',
        'claude-3-5-sonnet',
        'Niansuh',
    ]

    image_models = []  # Add image models if applicable

    models = chat_models + image_models

    model_aliases = {
        # Add aliases if any
    }

    @classmethod
    def get_model(cls, model: str) -> str:
        return MODEL_MAPPING.get(model, cls.default_model)

    @classmethod
    def is_image_model(cls, model: str) -> bool:
        return model in cls.image_models

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: Optional[str] = None,  # accepted for interface compatibility; not yet applied to the client
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
        model_prefix = MODEL_PREFIXES.get(model, "")
        referer_path = MODEL_REFERERS.get(model, f"/?model={model}")
        referer_url = f"{BASE_URL}{referer_path}"

        # Update headers with dynamic Referer
        dynamic_headers = headers.copy()
        dynamic_headers['Referer'] = referer_url

        json_data = {
            "messages": [cls.message_to_dict(msg, model_prefix) for msg in messages],
            "stream": kwargs.get('stream', False),
            "temperature": kwargs.get('temperature', 0.7),
            "top_p": kwargs.get('top_p', 0.9),
            "max_tokens": kwargs.get('max_tokens', 99999999),
        }

        async with httpx.AsyncClient() as client:
            try:
                if json_data.get("stream"):
                    async with client.stream(
                        "POST",
                        cls.api_endpoint,
                        headers=dynamic_headers,
                        json=json_data,
                        timeout=100,
                    ) as response:
                        response.raise_for_status()
                        # Compute the timestamp once so the final "stop" chunk is
                        # valid even if the upstream stream yields no lines.
                        timestamp = int(datetime.now().timestamp())
                        async for line in response.aiter_lines():
                            if line:
                                content = line
                                if content.startswith("$@$v=undefined-rv1$@$"):
                                    content = content[21:]
                                # Strip the model prefix from the response content
                                cleaned_content = cls.strip_model_prefix(content, model_prefix)
                                yield f"data: {json.dumps(cls.create_chat_completion_data(cleaned_content, model, timestamp))}\n\n"

                        yield f"data: {json.dumps(cls.create_chat_completion_data('', model, timestamp, 'stop'))}\n\n"
                        yield "data: [DONE]\n\n"
                else:
                    response = await client.post(
                        cls.api_endpoint,
                        headers=dynamic_headers,
                        json=json_data,
                        timeout=100,
                    )
                    response.raise_for_status()
                    full_response = response.text
                    if full_response.startswith("$@$v=undefined-rv1$@$"):
                        full_response = full_response[21:]
                    # Strip the model prefix from the full response
                    cleaned_full_response = cls.strip_model_prefix(full_response, model_prefix)
                    # An async generator cannot `return` a value, so yield the
                    # completed chat.completion object as a single item instead.
                    yield {
                        "id": f"chatcmpl-{uuid.uuid4()}",
                        "object": "chat.completion",
                        "created": int(datetime.now().timestamp()),
                        "model": model,
                        "choices": [
                            {
                                "index": 0,
                                "message": {"role": "assistant", "content": cleaned_full_response},
                                "finish_reason": "stop",
                            }
                        ],
                        "usage": None,
                    }
            except httpx.HTTPStatusError as e:
                logger.error(f"HTTP error occurred: {e}")
                raise HTTPException(status_code=e.response.status_code, detail=str(e))
            except httpx.RequestError as e:
                logger.error(f"Error occurred during request: {e}")
                raise HTTPException(status_code=500, detail=str(e))

    @staticmethod
    def message_to_dict(message, model_prefix: Optional[str] = None):
        """Convert an incoming message into the BlackBox payload format.

        Plain-text content is optionally prefixed with the model prefix; a
        [text, image_url] pair is sent as text plus an inline base64 image.
        """
        if isinstance(message["content"], str):
            content = message["content"]
            if model_prefix:
                content = f"{model_prefix} {content}"
            return {"role": message["role"], "content": content}
        elif isinstance(message["content"], list) and len(message["content"]) == 2:
            content = message["content"][0]["text"]
            if model_prefix:
                content = f"{model_prefix} {content}"
            return {
                "role": message["role"],
                "content": content,
                "data": {
                    "imageBase64": message["content"][1]["image_url"]["url"],
                    "fileText": "",
                    "title": "snapshot",
                },
            }
        else:
            return {"role": message["role"], "content": message["content"]}

    @staticmethod
    def strip_model_prefix(content: str, model_prefix: Optional[str] = None) -> str:
        """Remove the model prefix from the response content if present."""
        if model_prefix and content.startswith(model_prefix):
            logger.debug(f"Stripping prefix '{model_prefix}' from content.")
            return content[len(model_prefix):].strip()
        logger.debug("No prefix to strip from content.")
        return content

    @staticmethod
    def create_chat_completion_data(
        content: str, model: str, timestamp: int, finish_reason: Optional[str] = None
    ) -> Dict[str, Any]:
        return {
            "id": f"chatcmpl-{uuid.uuid4()}",
            "object": "chat.completion.chunk",
            "created": timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {"content": content, "role": "assistant"},
                    "finish_reason": finish_reason,
                }
            ],
            "usage": None,
        }
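
A minimal consumption sketch, assuming the surrounding api package (config, models, logger, image, typing, base_provider) plus httpx and fastapi are importable; the script name and message contents are illustrative, not part of the commit.

# usage_sketch.py (hypothetical) — drive BlackBoxAI.create_async_generator directly.
import asyncio

from api.providers.blackboxai import BlackBoxAI


async def main() -> None:
    messages = [{"role": "user", "content": "Write a haiku about retries."}]

    # Streaming mode: the generator yields OpenAI-style SSE chunks
    # ("data: {...}\n\n"), finishing with a stop chunk and "data: [DONE]".
    async for chunk in BlackBoxAI.create_async_generator(
        model="blackboxai",
        messages=messages,
        stream=True,
    ):
        print(chunk, end="")


if __name__ == "__main__":
    asyncio.run(main())

With stream=False the generator instead yields a single chat.completion dict containing the full assistant message.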