isididiidid committed on
Commit faefd73 · verified · 1 Parent(s): 607afd3

Update app.py

Files changed (1)
  1. app.py +42 -136
app.py CHANGED
@@ -228,60 +228,16 @@ async def stream_openai_response(response, request_id: str, model: str, api_key,
 
             if line.startswith(b'data: '):
                 try:
-                    data_text = line[6:].decode('utf-8')
-                    # Check that this is valid JSON
-                    if data_text.strip():
-                        data = json.loads(data_text)
-
-                        if data.get('code') == 202 and data.get('data', {}).get('type') == "chat":
-                            # Extract the message content
-                            content = data.get('data', {}).get('content', '')
-                            if content:
-                                full_response += content
-
-                                # Build an OpenAI-format streaming chunk
-                                chunk = {
-                                    "id": f"chatcmpl-{request_id}",
-                                    "object": "chat.completion.chunk",
-                                    "created": timestamp,
-                                    "model": model,
-                                    "choices": [
-                                        {
-                                            "index": 0,
-                                            "delta": {
-                                                "content": content
-                                            },
-                                            "finish_reason": None
-                                        }
-                                    ]
-                                }
-                                yield f"data: {json.dumps(chunk)}\n\n"
-
-                        elif data.get('code') == 203:
-                            # Emit the completion signal
-                            chunk = {
-                                "id": f"chatcmpl-{request_id}",
-                                "object": "chat.completion.chunk",
-                                "created": timestamp,
-                                "model": model,
-                                "choices": [
-                                    {
-                                        "index": 0,
-                                        "delta": {},
-                                        "finish_reason": "stop"
-                                    }
-                                ]
-                            }
-                            yield f"data: {json.dumps(chunk)}\n\n"
-                            yield "data: [DONE]\n\n"
-
-                        # Also handle error codes
-                        elif data.get('code') != 0:
-                            error_msg = data.get('message', 'Unknown error')
-                            logger.error(f"API returned an error: {error_msg}")
+                    data = json.loads(line[6:].decode('utf-8'))
+
+                    if data.get('code') == 202 and data.get('data', {}).get('type') == "chat":
+                        # Extract the message content
+                        content = data.get('data', {}).get('content', '')
+                        if content:
+                            full_response += content
 
-                            # Return the error message to the client
-                            error_chunk = {
+                            # Build an OpenAI-format streaming chunk
+                            chunk = {
                                 "id": f"chatcmpl-{request_id}",
                                 "object": "chat.completion.chunk",
                                 "created": timestamp,
@@ -290,41 +246,34 @@ async def stream_openai_response(response, request_id: str, model: str, api_key,
                                     {
                                         "index": 0,
                                         "delta": {
-                                            "content": f"\n\n[API returned an error: {error_msg}]"
+                                            "content": content
                                         },
-                                        "finish_reason": "stop"
+                                        "finish_reason": None
                                     }
                                 ]
                             }
-                            yield f"data: {json.dumps(error_chunk)}\n\n"
-                            yield "data: [DONE]\n\n"
-
-                    except json.JSONDecodeError as e:
-                        logger.warning(f"Could not parse response: {line} - error: {str(e)}")
-                    except Exception as e:
-                        logger.warning(f"Error while processing data: {str(e)} - data: {line}")
+                            yield f"data: {json.dumps(chunk)}\n\n"
 
-            # Check whether any content was received at all
-            if not full_response:
-                logger.warning("No response content received")
-                # Send a notice message
-                empty_chunk = {
-                    "id": f"chatcmpl-{request_id}",
-                    "object": "chat.completion.chunk",
-                    "created": timestamp,
-                    "model": model,
-                    "choices": [
-                        {
-                            "index": 0,
-                            "delta": {
-                                "content": "[No response received from the API. Please check that your token is valid and has sufficient quota.]"
-                            },
-                            "finish_reason": "stop"
-                        }
-                    ]
-                }
-                yield f"data: {json.dumps(empty_chunk)}\n\n"
-                yield "data: [DONE]\n\n"
+                    elif data.get('code') == 203:
+                        # Emit the completion signal
+                        chunk = {
+                            "id": f"chatcmpl-{request_id}",
+                            "object": "chat.completion.chunk",
+                            "created": timestamp,
+                            "model": model,
+                            "choices": [
+                                {
+                                    "index": 0,
+                                    "delta": {},
+                                    "finish_reason": "stop"
+                                }
+                            ]
+                        }
+                        yield f"data: {json.dumps(chunk)}\n\n"
+                        yield "data: [DONE]\n\n"
+
+                except json.JSONDecodeError:
+                    logger.warning(f"Could not parse response: {line}")
 
     except Exception as e:
         logger.error(f"Error while processing the streaming response: {str(e)}")
@@ -357,16 +306,7 @@ async def stream_openai_response(response, request_id: str, model: str, api_key,
 # Route definitions
 @app.get("/")
 async def root():
-    """Return a simple page with usage instructions"""
-    return {
-        "message": "OpenAI API Proxy service is running, connected to the DeepSider API",
-        "usage": {
-            "Model list": "GET /v1/models",
-            "Chat completions": "POST /v1/chat/completions",
-            "Account balance": "GET /admin/balance (requires the X-Admin-Key header)"
-        },
-        "Notes": "Use Bearer token format in the Authorization header; multiple tokens separated by commas are rotated in turn"
-    }
+    return {"message": "OpenAI API Proxy service is running, connected to the DeepSider API"}
 
 @app.get("/v1/models")
 async def list_models(api_key: str = Depends(verify_api_key)):
@@ -419,9 +359,6 @@ async def create_chat_completion(
     current_token_index = (TOKEN_INDEX - 1) % len(tokens) if len(tokens) > 0 else 0
 
     try:
-        # Log the request details
-        logger.info(f"Sending request to the DeepSider API - model: {deepsider_model}, token index: {current_token_index}")
-
         # Send the request to the DeepSider API
         response = requests.post(
             f"{DEEPSIDER_API_BASE}/chat/conversation",
@@ -452,39 +389,21 @@ async def create_chat_completion(
         else:
             # Collect the full response
            full_response = ""
-            has_error = False
-            error_message = ""
-
             for line in response.iter_lines():
                 if not line:
                     continue
 
                 if line.startswith(b'data: '):
                     try:
-                        data_text = line[6:].decode('utf-8')
-                        if data_text.strip():
-                            data = json.loads(data_text)
-
-                            if data.get('code') == 202 and data.get('data', {}).get('type') == "chat":
-                                content = data.get('data', {}).get('content', '')
-                                if content:
-                                    full_response += content
-                            elif data.get('code') != 0 and data.get('code') != 203:
-                                has_error = True
-                                error_message = data.get('message', 'Unknown error')
-                                logger.error(f"API returned an error: {error_message}")
+                        data = json.loads(line[6:].decode('utf-8'))
+
+                        if data.get('code') == 202 and data.get('data', {}).get('type') == "chat":
+                            content = data.get('data', {}).get('content', '')
+                            if content:
+                                full_response += content
 
-                    except json.JSONDecodeError as e:
-                        logger.warning(f"Could not parse response: {line} - error: {str(e)}")
-                    except Exception as e:
-                        logger.warning(f"Error while processing data: {str(e)} - data: {line}")
-
-            # Check whether any content was received at all
-            if not full_response and not has_error:
-                logger.warning("No response content received")
-                full_response = "[No response received from the API. Please check that your token is valid and has sufficient quota.]"
-            elif has_error:
-                full_response = f"[API returned an error: {error_message}]"
+                    except json.JSONDecodeError:
+                        pass
 
             # Return the full response in OpenAI format
             return await generate_openai_response(full_response, request_id, chat_request.model)
@@ -540,17 +459,7 @@ async def not_found_handler(request, exc):
             "type": "not_found_error",
             "code": "not_found"
         }
-    }
-
-@app.exception_handler(500)
-async def server_error_handler(request, exc):
-    return {
-        "error": {
-            "message": f"Internal server error: {str(exc)}",
-            "type": "server_error",
-            "code": "internal_server_error"
-        }
-    }
+    }, 404
 
 # Startup event
 @app.on_event("startup")
@@ -558,9 +467,6 @@ async def startup_event():
     """Initialize when the service starts"""
     logger.info(f"OpenAI API proxy service started and accepting requests")
     logger.info(f"Multi-token rotation supported; separate multiple tokens in the Authorization header with commas")
-    logger.info(f"Service address: http://127.0.0.1:7860")
-    logger.info(f"Example OpenAI-format request: POST http://127.0.0.1:7860/v1/chat/completions")
-    logger.info(f"List available models: GET http://127.0.0.1:7860/v1/models")
 
 # Main program
 if __name__ == "__main__":
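For readers skimming the diff: the new parser keeps only two DeepSider SSE cases, `code == 202` with `data.type == "chat"` (a content fragment) and `code == 203` (end of stream), and re-emits them as OpenAI `chat.completion.chunk` objects followed by `data: [DONE]`. Below is a minimal standalone sketch of that mapping; the sample payloads and model name are illustrative, not captured from the live API.

```python
import json
import time
import uuid


def deepsider_line_to_openai_chunk(raw: bytes, request_id: str, model: str):
    """Map one DeepSider SSE line onto an OpenAI-style chat.completion.chunk dict.

    Returns None for lines that carry neither a content fragment (202/chat)
    nor the stop signal (203), mirroring what the updated app.py keeps.
    """
    if not raw.startswith(b"data: "):
        return None
    data = json.loads(raw[6:].decode("utf-8"))
    base = {
        "id": f"chatcmpl-{request_id}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
    }
    if data.get("code") == 202 and data.get("data", {}).get("type") == "chat":
        delta = {"content": data["data"].get("content", "")}
        return {**base, "choices": [{"index": 0, "delta": delta, "finish_reason": None}]}
    if data.get("code") == 203:
        return {**base, "choices": [{"index": 0, "delta": {}, "finish_reason": "stop"}]}
    return None


# Illustrative payloads only; the field names follow what app.py checks.
rid = uuid.uuid4().hex
print(deepsider_line_to_openai_chunk(
    b'data: {"code": 202, "data": {"type": "chat", "content": "Hello"}}', rid, "example-model"))
print(deepsider_line_to_openai_chunk(
    b'data: {"code": 203, "data": {}}', rid, "example-model"))
```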
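The unchanged context line `current_token_index = (TOKEN_INDEX - 1) % len(tokens)` and the startup note about comma-separated tokens describe round-robin token rotation. The helper below is a hypothetical sketch of that pattern; the name `pick_token` and the header-parsing details are assumptions, not code from app.py.

```python
TOKEN_INDEX = 0  # global rotation counter, mirroring the name referenced in app.py


def pick_token(auth_header: str) -> str:
    """Hypothetical round-robin pick from a comma-separated Authorization value."""
    global TOKEN_INDEX
    raw = auth_header[len("Bearer "):] if auth_header.startswith("Bearer ") else auth_header
    tokens = [t.strip() for t in raw.split(",") if t.strip()]
    if not tokens:
        raise ValueError("no tokens supplied")
    token = tokens[TOKEN_INDEX % len(tokens)]
    TOKEN_INDEX += 1
    return token


print(pick_token("Bearer token-a,token-b"))  # token-a
print(pick_token("Bearer token-a,token-b"))  # token-b
```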
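Since this commit removes both the verbose startup hints and the root endpoint's usage listing, here is a hedged client-side sketch of how the proxy is called, based only on the endpoints, the Bearer header convention, and the `http://127.0.0.1:7860` address that appear in the removed lines; the token values and model name are placeholders.

```python
import requests

BASE_URL = "http://127.0.0.1:7860"  # address taken from the removed startup log lines
HEADERS = {"Authorization": "Bearer token-a,token-b"}  # comma-separated tokens are rotated

# List the models the proxy exposes.
print(requests.get(f"{BASE_URL}/v1/models", headers=HEADERS).json())

# Non-streaming OpenAI-style chat completion through the proxy.
payload = {
    "model": "example-model",  # placeholder; use an id returned by /v1/models
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": False,
}
print(requests.post(f"{BASE_URL}/v1/chat/completions", headers=HEADERS, json=payload).json())
```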