Spaces:
mxrkai
/
Runtime error

Niansuh committed on
Commit
0705903
·
verified ·
1 Parent(s): d4f4416

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +153 -146
main.py CHANGED
@@ -202,160 +202,167 @@ class Blackbox:
202
  return cleaned_text
203
 
204
  @classmethod
205
- async def generate_response(
206
- cls,
207
- model: str,
208
- messages: List[Dict[str, str]],
209
- proxy: Optional[str] = None,
210
- websearch: bool = False,
211
- **kwargs
212
- ) -> Union[str, ImageResponseModel]:
213
- model = cls.get_model(model)
214
- chat_id = cls.generate_random_string()
215
- next_action = cls.generate_next_action()
216
- next_router_state_tree = cls.generate_next_router_state_tree()
217
-
218
- agent_mode = cls.agentMode.get(model, {})
219
- trending_agent_mode = cls.trendingAgentMode.get(model, {})
220
-
221
- prefix = cls.model_prefixes.get(model, "")
222
-
223
- formatted_prompt = ""
224
- for message in messages:
225
- role = message.get('role', '').capitalize()
226
- content = message.get('content', '')
227
- if role and content:
228
- formatted_prompt += f"{role}: {content}\n"
229
-
230
- if prefix:
231
- formatted_prompt = f"{prefix} {formatted_prompt}".strip()
232
-
233
- referer_path = cls.model_referers.get(model, f"/?model={model}")
234
- referer_url = f"{cls.url}{referer_path}"
235
-
236
- common_headers = {
237
- 'accept': '*/*',
238
- 'accept-language': 'en-US,en;q=0.9',
239
- 'cache-control': 'no-cache',
240
- 'origin': cls.url,
241
- 'pragma': 'no-cache',
242
- 'priority': 'u=1, i',
243
- 'sec-ch-ua': '"Chromium";v="129", "Not=A?Brand";v="8"',
244
- 'sec-ch-ua-mobile': '?0',
245
- 'sec-ch-ua-platform': '"Linux"',
246
- 'sec-fetch-dest': 'empty',
247
- 'sec-fetch-mode': 'cors',
248
- 'sec-fetch-site': 'same-origin',
249
- 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) '
250
- 'AppleWebKit/537.36 (KHTML, like Gecko) '
251
- 'Chrome/129.0.0.0 Safari/537.36'
252
- }
253
-
254
- headers_api_chat = {
255
- 'Content-Type': 'application/json',
256
- 'Referer': referer_url
257
- }
258
- headers_api_chat_combined = {**common_headers, **headers_api_chat}
259
-
260
- payload_api_chat = {
261
- "messages": [
262
- {
263
- "id": chat_id,
264
- "content": formatted_prompt,
265
- "role": "user"
266
- }
267
- ],
268
- "id": chat_id,
269
- "previewToken": None,
270
- "userId": None,
271
- "codeModelMode": True,
272
- "agentMode": agent_mode,
273
- "trendingAgentMode": trending_agent_mode,
274
- "isMicMode": False,
275
- "userSystemPrompt": None,
276
- "maxTokens": 1024,
277
- "playgroundTopP": 0.9,
278
- "playgroundTemperature": 0.5,
279
- "isChromeExt": False,
280
- "githubToken": None,
281
- "clickedAnswer2": False,
282
- "clickedAnswer3": False,
283
- "clickedForceWebSearch": False,
284
- "visitFromDelta": False,
285
- "mobileClient": False,
286
- "webSearchMode": websearch,
287
- "userSelectedModel": cls.userSelectedModel.get(model, model)
288
- }
289
-
290
- headers_chat = {
291
- 'Accept': 'text/x-component',
292
- 'Content-Type': 'text/plain;charset=UTF-8',
293
- 'Referer': f'{cls.url}/chat/{chat_id}?model={model}',
294
- 'next-action': next_action,
295
- 'next-router-state-tree': next_router_state_tree,
296
- 'next-url': '/'
297
- }
298
- headers_chat_combined = {**common_headers, **headers_chat}
299
 
300
- data_chat = '[]'
 
 
 
 
301
 
302
- async with ClientSession(headers=common_headers) as session:
303
- try:
304
- async with session.post(
305
- cls.api_endpoint,
306
- headers=headers_api_chat_combined,
307
- json=payload_api_chat,
308
- proxy=proxy
309
- ) as response_api_chat:
310
- response_api_chat.raise_for_status()
311
- text = await response_api_chat.text()
312
- cleaned_response = cls.clean_response(text)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
313
 
314
- if model in cls.image_models:
315
- match = re.search(r'!\[.*?\]\((https?://[^\)]+)\)', cleaned_response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
316
  if match:
317
- image_url = match.group(1)
318
- image_response = ImageResponseModel(images=image_url, alt="Generated Image")
319
- return image_response
 
 
 
 
 
 
 
 
 
 
320
  else:
321
- return cleaned_response
322
  else:
323
- if websearch:
324
- match = re.search(r'\$~~~\$(.*?)\$~~~\$', cleaned_response, re.DOTALL)
325
- if match:
326
- source_part = match.group(1).strip()
327
- answer_part = cleaned_response[match.end():].strip()
328
- try:
329
- sources = json.loads(source_part)
330
- source_formatted = "**Source:**\n"
331
- for item in sources:
332
- title = item.get('title', 'No Title')
333
- link = item.get('link', '#')
334
- position = item.get('position', '')
335
- source_formatted += f"{position}. [{title}]({link})\n"
336
- final_response = f"{answer_part}\n\n{source_formatted}"
337
- except json.JSONDecodeError:
338
- final_response = f"{answer_part}\n\nSource information is unavailable."
339
- else:
340
- final_response = cleaned_response
341
  else:
342
- if '$~~~$' in cleaned_response:
343
- final_response = cleaned_response.split('$~~~$')[0].strip()
344
- else:
345
- final_response = cleaned_response
346
 
347
- return final_response
348
- except ClientResponseError as e:
349
- error_text = f"Error {e.status}: {e.message}"
350
- try:
351
- error_response = await e.response.text()
352
- cleaned_error = cls.clean_response(error_response)
353
- error_text += f" - {cleaned_error}"
354
- except Exception:
355
- pass
356
- return error_text
357
- except Exception as e:
358
- return f"Unexpected error during /api/chat request: {str(e)}"
 
 
 
359
 
360
  @classmethod
361
  async def create_async_generator(
 
202
  return cleaned_text
203
 
204
  @classmethod
205
+ async def generate_response(
206
+ cls,
207
+ model: str,
208
+ messages: List[Dict[str, str]],
209
+ proxy: Optional[str] = None,
210
+ websearch: bool = False,
211
+ **kwargs
212
+ ) -> Union[str, ImageResponseModel]:
213
+ model = cls.get_model(model)
214
+ chat_id = cls.generate_random_string()
215
+ next_action = cls.generate_next_action()
216
+ next_router_state_tree = cls.generate_next_router_state_tree()
217
+
218
+ agent_mode = cls.agentMode.get(model, {})
219
+ trending_agent_mode = cls.trendingAgentMode.get(model, {})
220
+
221
+ prefix = cls.model_prefixes.get(model, "")
222
+
223
+ formatted_prompt = ""
224
+ for message in messages:
225
+ role = message.get('role', '').capitalize()
226
+ content = message.get('content', '')
227
+ if role and content:
228
+ formatted_prompt += f"{role}: {content}\n"
229
+
230
+ if prefix:
231
+ formatted_prompt = f"{prefix} {formatted_prompt}".strip()
232
+
233
+ referer_path = cls.model_referers.get(model, f"/?model={model}")
234
+ referer_url = f"{cls.url}{referer_path}"
235
+
236
+ common_headers = {
237
+ 'accept': '*/*',
238
+ 'accept-language': 'en-US,en;q=0.9',
239
+ 'cache-control': 'no-cache',
240
+ 'origin': cls.url,
241
+ 'pragma': 'no-cache',
242
+ 'priority': 'u=1, i',
243
+ 'sec-ch-ua': '"Chromium";v="129", "Not=A?Brand";v="8"',
244
+ 'sec-ch-ua-mobile': '?0',
245
+ 'sec-ch-ua-platform': '"Linux"',
246
+ 'sec-fetch-dest': 'empty',
247
+ 'sec-fetch-mode': 'cors',
248
+ 'sec-fetch-site': 'same-origin',
249
+ 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) '
250
+ 'AppleWebKit/537.36 (KHTML, like Gecko) '
251
+ 'Chrome/129.0.0.0 Safari/537.36'
252
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
253
 
254
+ headers_api_chat = {
255
+ 'Content-Type': 'application/json',
256
+ 'Referer': referer_url
257
+ }
258
+ headers_api_chat_combined = {**common_headers, **headers_api_chat}
259
 
260
+ payload_api_chat = {
261
+ "messages": [
262
+ {
263
+ "id": chat_id,
264
+ "content": formatted_prompt,
265
+ "role": "user"
266
+ }
267
+ ],
268
+ "id": chat_id,
269
+ "previewToken": None,
270
+ "userId": None,
271
+ "codeModelMode": True,
272
+ "agentMode": agent_mode,
273
+ "trendingAgentMode": trending_agent_mode,
274
+ "isMicMode": False,
275
+ "userSystemPrompt": None,
276
+ "maxTokens": 1024,
277
+ "playgroundTopP": 0.9,
278
+ "playgroundTemperature": 0.5,
279
+ "isChromeExt": False,
280
+ "githubToken": None,
281
+ "clickedAnswer2": False,
282
+ "clickedAnswer3": False,
283
+ "clickedForceWebSearch": False,
284
+ "visitFromDelta": False,
285
+ "mobileClient": False,
286
+ "webSearchMode": websearch,
287
+ "userSelectedModel": cls.userSelectedModel.get(model, model)
288
+ }
289
 
290
+ headers_chat = {
291
+ 'Accept': 'text/x-component',
292
+ 'Content-Type': 'text/plain;charset=UTF-8',
293
+ 'Referer': f'{cls.url}/chat/{chat_id}?model={model}',
294
+ 'next-action': next_action,
295
+ 'next-router-state-tree': next_router_state_tree,
296
+ 'next-url': '/'
297
+ }
298
+ headers_chat_combined = {**common_headers, **headers_chat}
299
+
300
+ data_chat = '[]'
301
+
302
+ async with ClientSession(headers=common_headers) as session:
303
+ try:
304
+ async with session.post(
305
+ cls.api_endpoint,
306
+ headers=headers_api_chat_combined,
307
+ json=payload_api_chat,
308
+ proxy=proxy
309
+ ) as response_api_chat:
310
+ response_api_chat.raise_for_status()
311
+ text = await response_api_chat.text()
312
+ logger.debug(f"Raw response from Blackbox API: {text}") # Added logging
313
+ cleaned_response = cls.clean_response(text)
314
+ logger.debug(f"Cleaned response: {cleaned_response}") # Added logging
315
+
316
+ if model in cls.image_models:
317
+ match = re.search(r'!\[.*?\]\((https?://[^\)]+)\)', cleaned_response)
318
+ if match:
319
+ image_url = match.group(1)
320
+ image_response = ImageResponseModel(images=image_url, alt="Generated Image")
321
+ logger.debug(f"Image URL extracted: {image_url}") # Added logging
322
+ return image_response
323
+ else:
324
+ logger.debug("No image URL found in the response.") # Added logging
325
+ return cleaned_response
326
+ else:
327
+ if websearch:
328
+ match = re.search(r'\$~~~\$(.*?)\$~~~\$', cleaned_response, re.DOTALL)
329
  if match:
330
+ source_part = match.group(1).strip()
331
+ answer_part = cleaned_response[match.end():].strip()
332
+ try:
333
+ sources = json.loads(source_part)
334
+ source_formatted = "**Source:**\n"
335
+ for item in sources:
336
+ title = item.get('title', 'No Title')
337
+ link = item.get('link', '#')
338
+ position = item.get('position', '')
339
+ source_formatted += f"{position}. [{title}]({link})\n"
340
+ final_response = f"{answer_part}\n\n{source_formatted}"
341
+ except json.JSONDecodeError:
342
+ final_response = f"{answer_part}\n\nSource information is unavailable."
343
  else:
344
+ final_response = cleaned_response
345
  else:
346
+ if '$~~~$' in cleaned_response:
347
+ final_response = cleaned_response.split('$~~~$')[0].strip()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
348
  else:
349
+ final_response = cleaned_response
 
 
 
350
 
351
+ logger.debug(f"Final response to return: {final_response}") # Added logging
352
+ return final_response
353
+ except ClientResponseError as e:
354
+ error_text = f"Error {e.status}: {e.message}"
355
+ try:
356
+ error_response = await e.response.text()
357
+ cleaned_error = cls.clean_response(error_response)
358
+ error_text += f" - {cleaned_error}"
359
+ logger.error(f"ClientResponseError: {error_text}") # Added logging
360
+ except Exception:
361
+ pass
362
+ return error_text
363
+ except Exception as e:
364
+ logger.exception(f"Unexpected error during /api/chat request: {str(e)}") # Added logging
365
+ return f"Unexpected error during /api/chat request: {str(e)}"
366
 
367
  @classmethod
368
  async def create_async_generator(