AIMaster7 committed (verified)
Commit f516184 · Parent(s): 1eccefb

fixed syntax error

Files changed (1): main.py +40 -39
main.py CHANGED
@@ -40,57 +40,57 @@ async def chat_completion(request: ChatRequest):
         "model": request.model
     }
 
-    if request.stream:
-        def event_stream():
-            chat_id = f"chatcmpl-{unix_id()}"
-            created = int(time.time())
-            sent_done = False
+    if request.stream:
+        def event_stream():
+            chat_id = f"chatcmpl-{unix_id()}"
+            created = int(time.time())
+            sent_done = False
 
-            with requests.post(
-                "https://www.chatwithmono.xyz/api/chat",
-                headers=headers,
-                json=payload,
-                stream=True,
-                timeout=120
-            ) as response:
-                for line in response.iter_lines(decode_unicode=True):
-                    if line.startswith("0:"):
-                        try:
-                            content_piece = json.loads(line[2:])
-                            chunk_data = {
+            with requests.post(
+                "https://www.chatwithmono.xyz/api/chat",
+                headers=headers,
+                json=payload,
+                stream=True,
+                timeout=120
+            ) as response:
+                for line in response.iter_lines(decode_unicode=True):
+                    if line.startswith("0:"):
+                        try:
+                            content_piece = json.loads(line[2:])
+                            chunk_data = {
+                                "id": chat_id,
+                                "object": "chat.completion.chunk",
+                                "created": created,
+                                "model": request.model,
+                                "choices": [
+                                    {
+                                        "delta": {"content": content_piece},
+                                        "index": 0,
+                                        "finish_reason": None
+                                    }
+                                ]
+                            }
+                            yield f"data: {json.dumps(chunk_data)}\n\n"
+                        except:
+                            continue
+                    elif line.startswith(("e:", "d:")) and not sent_done:
+                        sent_done = True
+                        done_chunk = {
                             "id": chat_id,
                             "object": "chat.completion.chunk",
                             "created": created,
                             "model": request.model,
                             "choices": [
                                 {
-                                    "delta": {"content": content_piece},
+                                    "delta": {},
                                     "index": 0,
-                                    "finish_reason": None
+                                    "finish_reason": "stop"
                                 }
                             ]
                         }
-                            yield f"data: {json.dumps(chunk_data)}\n\n"
-                        except:
-                            continue
-                    elif line.startswith(("e:", "d:")) and not sent_done:
-                        sent_done = True
-                        done_chunk = {
-                            "id": chat_id,
-                            "object": "chat.completion.chunk",
-                            "created": created,
-                            "model": request.model,
-                            "choices": [
-                                {
-                                    "delta": {},
-                                    "index": 0,
-                                    "finish_reason": "stop"
-                                }
-                            ]
-                        }
-                        yield f"data: {json.dumps(done_chunk)}\n\ndata: [DONE]\n\n"
+                        yield f"data: {json.dumps(done_chunk)}\n\ndata: [DONE]\n\n"
 
-        return StreamingResponse(event_stream(), media_type="text/event-stream")
+        return StreamingResponse(event_stream(), media_type="text/event-stream")
 
     else:
         assistant_response = ""
@@ -136,3 +136,4 @@ if request.stream:
                 "total_tokens": usage_info.get("promptTokens", 0) + usage_info.get("completionTokens", 0),
             }
         })
+all
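
For reference, a minimal sketch of how a client might consume the streaming branch shown in this diff. The host, port, route path, and request-body field names below are assumptions (none of them are visible in this hunk); only the SSE chunk format and the "data: [DONE]" sentinel match what the handler emits.

# Hypothetical client for the streaming endpoint above.
# ASSUMPTIONS: the app is served at http://localhost:8000 and the handler is
# mounted at /chat/completions; payload fields mirror a typical ChatRequest.
import json
import requests

payload = {
    "model": "gpt-4o",                                    # assumed model name
    "stream": True,                                       # take the streaming branch
    "messages": [{"role": "user", "content": "Hello!"}],  # assumed ChatRequest field
}

with requests.post("http://localhost:8000/chat/completions",
                   json=payload, stream=True, timeout=120) as resp:
    for raw in resp.iter_lines(decode_unicode=True):
        # The server yields lines of the form "data: {json}\n\n".
        if not raw or not raw.startswith("data: "):
            continue
        data = raw[len("data: "):]
        if data == "[DONE]":          # terminal sentinel sent after the stop chunk
            break
        chunk = json.loads(data)
        delta = chunk["choices"][0]["delta"]
        print(delta.get("content", ""), end="", flush=True)
print()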