tianlong12 committed on
Commit
6ac755c
·
verified ·
1 Parent(s): 52e2f7c

Create app.py

Files changed (1)
  1. app.py +336 -0
app.py ADDED
@@ -0,0 +1,336 @@
import random
import time
import json
import re
import aiohttp
import requests
import asyncio
from flask import Flask, request, Response
from flask_cors import CORS
import uuid

app = Flask(__name__)
CORS(app)

# Fill in the placeholder values below with data from your own magai.co session.
MAGAI_TOKEN = {
    "cookie": "_fbp=fb.1.1722349051350.28463402121267809;soquick-mobile_u1main=1722349236461x685414888067651300;intercom-id-jnjoad6e=cbbd8fc9-a010-4e8c-8e7e-9cffccd3abea;soquick-mobile_live_u2main.sig=HuQePfrEHGidu4eRyfiZkcL1_2E;__stripe_mid=7767e1a3-e87f-4456-b073-6c8b7ae9e82119b00d;__stripe_sid=99c612a5-a12a-426f-baa5-e61471a013f140c482;_ga=GA1.1.242967908.1722349051;_ga_GFQ25YSHT2=GS1.1.1726123356.1.0.1726123393.0.0.0;_ga_N5J29RVHDJ=GS1.1.1726123395.4.1.1726123416.0.0.0;intercom-device-id-jnjoad6e=35ee824e-f7f6-415d-8698-bd822cb46d3a;intercom-session-jnjoad6e=TS9MVXB3RVNxOWJBTlVxN3MzcFRhZGJRL05TN2FpUlI2MVpnc3JDaG9TZi81ZERUNXkweVdzVUxicCt5VmJKVy0tTmo4d1loRSs0dWlqRUJmMm1NK2tHUT09--2f27d9afeb23d0d2ba40d57ca6ec33bb6ddb20d1;soquick-mobile_live_u2main=bus|1722349236461x685414888067651300|1726123417637x655253536227564700",
    "app_last_change": "fill in app_last_change",
    "current_page_item": "fill in current_page_item",
    "current_user": "fill in current_user",
}

# Public model names exposed by this proxy, mapped to the upstream model ids.
MAGAI_MAPPING = {
    "gpt-4o": "openai/gpt-4o",
    "claude-3.5-sonnet": "anthropic/claude-3.5-sonnet:beta",
    "claude-3-opus": "anthropic/claude-3-opus:beta",
    "gemini-1.5-pro": "google/gemini-pro-1.5",
}

UUID_LENGTH = int(1e18)
MODULO = int(1e18)


def generate_uuid():
    # Bubble-style id: "<unix millis>x<18-digit random number>".
    return f"{int(time.time() * 1000)}x{str(round(random.random() * UUID_LENGTH)).zfill(18)}"


def create_luid(separator="x"):
    timestamp = int(time.time() * 1000)
    return f"{timestamp}{separator}1"


def format_model_name(model_name):
    # e.g. "anthropic/claude-3.5-sonnet:beta" -> "anthropic_claude_3.5_sonnet_beta"
    return re.sub(r"_+", "_", re.sub(r"[/:-]", "_", model_name))


def find_token_in_object(obj):
    if isinstance(obj, dict):
        for key, value in obj.items():
            if key == "token" and isinstance(value, str):
                return value
            token = find_token_in_object(value)
            if token:
                return token
    return None


def get_last_user_content(messages):
    for message in reversed(messages):
        if message["role"] == "user":
            return message["content"]
    return None


async def get_token(model, message):
    # Call the Bubble "workflow/start" endpoint on app.magai.co and pull the
    # API token out of the returned step results.
    server_call_id = generate_uuid()
    created_id = MAGAI_TOKEN["current_page_item"].split("__")[0]
    user_id = MAGAI_TOKEN["current_user"].split("__")[2]
    model_id = "0060f9accd1dbade552f65ac646fb3da"
    item_id = "bUNih7"
    element_id = "bUNib7"

    body = {
        "app_last_change": MAGAI_TOKEN["app_last_change"],
        "calls": [
            {
                "client_state": {
                    "element_instances": {
                        "bUNib7": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUNib7",
                            "parent_element_id": "bUMiq3",
                        },
                        "bTezP": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bTezP",
                            "parent_element_id": "bTezJ",
                        },
                        "bTezE": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bTezE",
                            "parent_element_id": "bTeqc",
                        },
                        "bTezJ": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bTezJ",
                            "parent_element_id": "bUKFL2",
                        },
                        "bTezQ": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bTezQ",
                            "parent_element_id": "bUKFL2",
                        },
                        "bUiru0": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUiru0",
                            "parent_element_id": "bUjNK",
                        },
                        "bUDVj0": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUDVj0",
                            "parent_element_id": "bUMiq3",
                        },
                        "bUXzm2": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUXzm2",
                            "parent_element_id": "bUMhk3",
                        },
                        "bUifI1": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUifI1",
                            "parent_element_id": "bTeqg",
                        },
                        "bUMiq3": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bUMiq3",
                            "parent_element_id": "bTezE",
                        },
                        "bTekm": {
                            "dehydrated": f"{created_id}__LOOKUP__ElementInstance::bTekm",
                            "parent_element_id": None,
                        },
                    },
                    "element_state": {
                        f"{created_id}__LOOKUP__ElementInstance::bTezP": {
                            "is_visible": True,
                            "value_that_is_valid": message,
                            "value": message,
                        },
                        f"{created_id}__LOOKUP__ElementInstance::bTezE": {
                            "custom.images_": None,
                            "custom.file_": None,
                            "custom.file_content_": None,
                            "custom.file_name_": None,
                            "custom.file_type_": None,
                        },
                        f"{created_id}__LOOKUP__ElementInstance::bTezJ": {
                            "custom.isrecording_": None,
                            "custom.prompt_": None,
                        },
                        f"{created_id}__LOOKUP__ElementInstance::bUiru0": {"AAE": message},
                        f"{created_id}__LOOKUP__ElementInstance::bUDVj0": {"AAE": message},
                        f"{created_id}__LOOKUP__ElementInstance::bUifI1": {
                            "custom.is_visible_": None,
                            "group_data": None,
                        },
                        f"{created_id}__LOOKUP__ElementInstance::bUMiq3": {"group_data": None},
                    },
                    "other_data": {
                        "Current Page Scroll Position": 0,
                        "Current Page Width": 661,
                    },
                    "cache": {
                        f"{model_id}": format_model_name(model),
                        "true": True,
                        "CurrentPageItem": MAGAI_TOKEN["current_page_item"],
                        "CurrentUser": MAGAI_TOKEN["current_user"],
                    },
                    "exists": {
                        f"{model_id}": True,
                        "true": True,
                        "CurrentPageItem": True,
                        "CurrentUser": True,
                    },
                },
                "run_id": generate_uuid(),
                "server_call_id": server_call_id,
                "item_id": item_id,
                "element_id": element_id,
                "uid_generator": {
                    "timestamp": int(time.time() * 1000),
                    "seed": round(random.random() * UUID_LENGTH) % MODULO,
                },
                "random_seed": random.random(),
                "current_date_time": int(time.time() * 1000),
                "current_wf_params": {},
            }
        ],
        "client_breaking_revision": 5,
        "timezone_offset": -480,
        "timezone_string": "Asia/Shanghai",
        "user_id": user_id,
        "wait_for": [],
    }

    url = "https://app.magai.co/workflow/start"
    async with aiohttp.ClientSession() as session:
        async with session.post(
            url,
            headers={
                "x-bubble-fiber-id": generate_uuid(),
                "x-bubble-pl": create_luid(),
                "accept": "application/json, text/javascript, */*; q=0.01",
                "cookie": MAGAI_TOKEN["cookie"],
            },
            json=body,
        ) as response:
            response_data = await response.json()

    if "error_class" in response_data:
        raise Exception(response_data)

    server_call_data = response_data.get(server_call_id)
    if not server_call_data or "step_results" not in server_call_data:
        return None
    for step_result in server_call_data["step_results"].values():
        if isinstance(step_result.get("return_value"), dict):
            token = find_token_in_object(step_result["return_value"])
            if token:
                return token
    return None

async def get_request_data(model, messages):
    if model not in MAGAI_MAPPING:
        return Response(
            json.dumps(
                {
                    "error": {
                        "message": "This model is currently unavailable. Please try again later or choose another model.",
                        "code": "model_not_exists",
                    }
                }
            ),
            status=400,
            mimetype="application/json",
        )

    last_user_message = get_last_user_content(messages)
    token = await get_token(MAGAI_MAPPING[model], last_user_message)
    if not token:
        # Without a token the upstream proxy rejects the request, so fail early.
        return Response(
            json.dumps(
                {
                    "error": {
                        "message": "Failed to obtain an upstream token.",
                        "code": "token_error",
                    }
                }
            ),
            status=502,
            mimetype="application/json",
        )

    headers = {
        "Content-Type": "application/json",
        "HTTP-Referer": "https://magai.co",
        "Origin": "https://app.magai.co",
        "Pragma": "no-cache",
        "Referer": "https://app.magai.co/",
        "Token": token,
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/500.00 (KHTML, like Gecko) Chrome/100.0.0.0 Safari/500.00",
    }

    json_data = {
        "model": MAGAI_MAPPING[model],
        "messages": [{"role": "system", "content": "You are a helpful assistant."}]
        + messages,
        "tools": [
            {
                "type": "function",
                "function": {
                    "name": "get_actual_time_info",
                    "description": "Returns current information from the web about the prompt's topic.",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "The query string, based on the user's prompt, to search information about.",
                            }
                        },
                        "required": ["query"],
                    },
                },
            },
            {
                "type": "function",
                "function": {
                    "name": "generate_image",
                    "description": "Returns a generated image URL.",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Prompt for the image generation model describing what image to generate.",
                            }
                        },
                        "required": ["query"],
                    },
                },
            },
        ],
        "provider": {"data_collection": "deny"},
        "tool_choice": "auto",
        "stream": True,
    }

    # The payload requests a streamed response, so read it as a stream as well.
    response = requests.post(
        "https://live.proxy.magai.co:4430/opr/api/v1/chat/completions",
        headers=headers,
        json=json_data,
        stream=True,
    )
    return response


def format_response(response):
    # Concatenate the "content" deltas from the upstream SSE stream into one string.
    content = ""
    for line in response.iter_lines():
        if line:
            decoded_line = line.decode("utf-8")
            if decoded_line.startswith("data:"):
                try:
                    data = json.loads(decoded_line[5:].strip())
                    if "choices" in data and len(data["choices"]) > 0:
                        delta = data["choices"][0].get("delta", {})
                        if "content" in delta:
                            content += delta["content"]
                except json.JSONDecodeError:
                    # Skips non-JSON lines such as "data: [DONE]".
                    pass
    return content

311
+
312
+ @app.route("/hf/v1/chat/completions", methods=["POST"])
313
+ def chat_completions():
314
+ data = request.json
315
+ messages = data.get("messages", [])
316
+ model = data.get("model", "claude-3.5-sonnet")
317
+
318
+ async def process_request():
319
+ response = await get_request_data(model, messages)
320
+ return format_response(response)
321
+
322
+ loop = asyncio.new_event_loop()
323
+ asyncio.set_event_loop(loop)
324
+ result = loop.run_until_complete(process_request())
325
+
326
+ event_stream_response = ""
327
+ for part in result:
328
+ part = part.replace("\n", "\\n")
329
+ event_stream_response += f'data:{{"id":"{uuid.uuid4()}","object":"chat.completion.chunk","created":{int(time.time())},"model":"{model}","system_fingerprint":"fp_45ah8ld5a7","choices":[{{"index":0,"delta":{{"content":"{part}"}},"logprobs":null,"finish_reason":null}}]}}\n\n'
330
+ event_stream_response += "data:[DONE]\n"
331
+
332
+ return Response(event_stream_response, mimetype="text/event-stream")
333
+
334
+
335
+ if __name__ == "__main__":
336
+ app.run(host="0.0.0.0", port=7860)
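
Once the placeholders in MAGAI_TOKEN are filled in and the app is running, the endpoint can be exercised with a small client. Below is a minimal sketch, not part of the committed file; it assumes the server is reachable at http://localhost:7860 and that the requests package is installed.

# client_example.py - hypothetical client for the /hf/v1/chat/completions endpoint above.
import json
import requests

payload = {
    "model": "claude-3.5-sonnet",  # any key from MAGAI_MAPPING
    "messages": [{"role": "user", "content": "Say hello in one sentence."}],
}

resp = requests.post(
    "http://localhost:7860/hf/v1/chat/completions",  # assumed local address
    json=payload,
    stream=True,
)

# The server replies with an OpenAI-style SSE stream; concatenate the deltas.
answer = ""
for line in resp.iter_lines():
    if not line:
        continue
    line = line.decode("utf-8")
    if not line.startswith("data:"):
        continue
    data = line[len("data:"):].strip()
    if data == "[DONE]":
        break
    chunk = json.loads(data)
    answer += chunk["choices"][0]["delta"].get("content", "")

print(answer)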