Create main.ts
main.ts
ADDED
@@ -0,0 +1,431 @@
import { serve } from "https://deno.land/[email protected]/http/server.ts"; // NOTE: the std version here is an assumption; any std release that still exports `serve` works

// Constants
const API_URL = "https://mcp.scira.ai/api/chat";
const FIXED_USER_ID = "2jFMDM1A1R_XxOTxPjhwe";
const FIXED_CHAT_ID = "ZIWa36kd6MSqzw-ifXGzE";
const DEFAULT_MODEL = "qwen-qwq";
const PORT = 8888;

// Interface definitions
interface Message {
  role: string;
  content: string;
  parts?: Array<{
    type: string;
    text: string;
  }>;
}

interface SciraPayload {
  id: string;
  messages: Message[];
  selectedModel: string;
  mcpServers: any[];
  chatId: string;
  userId: string;
}

interface OpenAIModel {
  id: string;
  created: number;
  object: string;
}

// List of available models
const AVAILABLE_MODELS: OpenAIModel[] = [
  { id: "qwen-qwq", created: Date.now(), object: "model" },
  { id: "gemini-2.5-flash", created: Date.now(), object: "model" },
  { id: "gpt-4.1-mini", created: Date.now(), object: "model" },
  { id: "claude-3-7-sonnet", created: Date.now(), object: "model" },
];

// Convert messages into the format Scira expects
function formatMessagesForScira(messages: Message[]): Message[] {
  return messages.map(msg => ({
    role: msg.role,
    content: msg.content,
    parts: [{
      type: "text",
      text: msg.content
    }]
  }));
}

// Build the Scira request payload
function buildSciraPayload(messages: Message[], model = DEFAULT_MODEL): SciraPayload {
  const formattedMessages = formatMessagesForScira(messages);
  return {
    id: FIXED_CHAT_ID,
    messages: formattedMessages,
    selectedModel: model,
    mcpServers: [],
    chatId: FIXED_CHAT_ID,
    userId: FIXED_USER_ID
  };
}

// Handle the model list request
async function handleModelsRequest(): Promise<Response> {
  const response = {
    object: "list",
    data: AVAILABLE_MODELS,
  };
  return new Response(JSON.stringify(response), {
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*"
    },
  });
}

// Handle chat completion requests
async function handleChatCompletionsRequest(req: Request): Promise<Response> {
  const requestData = await req.json();
  const { messages, model = DEFAULT_MODEL, stream = false } = requestData;

  const sciraPayload = buildSciraPayload(messages, model);
  const response = await fetch(API_URL, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:137.0) Gecko/20100101 Firefox/137.0",
      "Accept": "*/*",
      "Referer": `https://mcp.scira.ai/chat/${FIXED_CHAT_ID}`,
      "Origin": "https://mcp.scira.ai",
    },
    body: JSON.stringify(sciraPayload),
  });

  if (stream) {
    return handleStreamResponse(response, model);
  } else {
    return handleRegularResponse(response, model);
  }
}

// Handle streaming responses
async function handleStreamResponse(response: Response, model: string): Promise<Response> {
  const reader = response.body!.getReader();
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const id = `chatcmpl-${Date.now().toString(36)}${Math.random().toString(36).substring(2, 10)}`;
  const createdTime = Math.floor(Date.now() / 1000);
  const systemFingerprint = `fp_${Math.random().toString(36).substring(2, 12)}`;

  const stream = new ReadableStream({
    async start(controller) {
      // Send the initial chunk that carries the assistant role
      const headerEvent = {
        id: id,
        object: "chat.completion.chunk",
        created: createdTime,
        model: model,
        system_fingerprint: systemFingerprint,
        choices: [{
          index: 0,
          delta: { role: "assistant" },
          logprobs: null,
          finish_reason: null
        }]
      };
      controller.enqueue(encoder.encode(`data: ${JSON.stringify(headerEvent)}\n\n`));

      try {
        let buffer = "";

        while (true) {
          const { done, value } = await reader.read();
          if (done) break;

          // Decode the current chunk and append it to the buffer
          buffer += decoder.decode(value, { stream: true });

          // Process complete lines
          const lines = buffer.split('\n');
          // Keep the last, possibly incomplete, line
          buffer = lines.pop() || "";

          // Process and immediately forward each line
          for (const line of lines) {
            if (!line.trim()) continue;

            if (line.startsWith('g:')) {
              // Lines starting with "g:" are emitted as reasoning_content
              let content = line.slice(2).replace(/^"/, "").replace(/"$/, "");
              content = content.replace(/\\n/g, "\n");

              const event = {
                id: id,
                object: "chat.completion.chunk",
                created: createdTime,
                model: model,
                system_fingerprint: systemFingerprint,
                choices: [{
                  index: 0,
                  delta: { reasoning_content: content },
                  logprobs: null,
                  finish_reason: null
                }]
              };
              controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`));
            } else if (line.startsWith('0:')) {
              // Lines starting with "0:" are emitted as content
              let content = line.slice(2).replace(/^"/, "").replace(/"$/, "");
              content = content.replace(/\\n/g, "\n");

              const event = {
                id: id,
                object: "chat.completion.chunk",
                created: createdTime,
                model: model,
                system_fingerprint: systemFingerprint,
                choices: [{
                  index: 0,
                  delta: { content: content },
                  logprobs: null,
                  finish_reason: null
                }]
              };
              controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`));
            } else if (line.startsWith('e:')) {
              // Finish message
              try {
                const finishData = JSON.parse(line.slice(2));
                const event = {
                  id: id,
                  object: "chat.completion.chunk",
                  created: createdTime,
                  model: model,
                  system_fingerprint: systemFingerprint,
                  choices: [{
                    index: 0,
                    delta: {},
                    logprobs: null,
                    finish_reason: finishData.finishReason || "stop"
                  }]
                };
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`));
              } catch (error) {
                console.error("Error parsing finish data:", error);
              }
            }
          }
        }

        // Process whatever is left in the buffer, if anything
        if (buffer.trim()) {
          const line = buffer.trim();
          if (line.startsWith('g:')) {
            let content = line.slice(2).replace(/^"/, "").replace(/"$/, "");
            content = content.replace(/\\n/g, "\n");

            const event = {
              id: id,
              object: "chat.completion.chunk",
              created: createdTime,
              model: model,
              system_fingerprint: systemFingerprint,
              choices: [{
                index: 0,
                delta: { reasoning_content: content },
                logprobs: null,
                finish_reason: null
              }]
            };
            controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`));
          } else if (line.startsWith('0:')) {
            let content = line.slice(2).replace(/^"/, "").replace(/"$/, "");
            content = content.replace(/\\n/g, "\n");

            const event = {
              id: id,
              object: "chat.completion.chunk",
              created: createdTime,
              model: model,
              system_fingerprint: systemFingerprint,
              choices: [{
                index: 0,
                delta: { content: content },
                logprobs: null,
                finish_reason: null
              }]
            };
            controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`));
          }
        }
      } catch (error) {
        console.error("Stream error:", error);
      } finally {
        // Always send "data: [DONE]" before closing
        controller.enqueue(encoder.encode("data: [DONE]\n\n"));
        controller.close();
      }
    }
  });

  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      "Connection": "keep-alive",
      "Access-Control-Allow-Origin": "*",
    },
  });
}

// Handle non-streaming responses
async function handleRegularResponse(response: Response, model: string): Promise<Response> {
  const text = await response.text();
  const lines = text.split('\n');

  let content = "";
  let reasoning_content = "";
  let usage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
  let finish_reason = "stop";

  for (const line of lines) {
    if (!line.trim()) continue;

    if (line.startsWith('0:')) {
      // Regular content; unescape newlines
      let lineContent = line.slice(2).replace(/^"/, "").replace(/"$/, "");
      lineContent = lineContent.replace(/\\n/g, "\n");
      content += lineContent;
    } else if (line.startsWith('g:')) {
      // Reasoning content; unescape newlines
      let lineContent = line.slice(2).replace(/^"/, "").replace(/"$/, "");
      lineContent = lineContent.replace(/\\n/g, "\n");
      reasoning_content += lineContent;
    } else if (line.startsWith('e:')) {
      try {
        const finishData = JSON.parse(line.slice(2));
        if (finishData.finishReason) {
          finish_reason = finishData.finishReason;
        }
      } catch (error) {
        console.error("Error parsing finish data:", error);
      }
    } else if (line.startsWith('d:')) {
      try {
        const finishData = JSON.parse(line.slice(2));
        if (finishData.usage) {
          usage.prompt_tokens = finishData.usage.promptTokens || 0;
          usage.completion_tokens = finishData.usage.completionTokens || 0;
          usage.total_tokens = usage.prompt_tokens + usage.completion_tokens;
        }
      } catch (error) {
        console.error("Error parsing usage data:", error);
      }
    }
  }

  const systemFingerprint = `fp_${Math.random().toString(36).substring(2, 12)}`;
  const id = `chatcmpl-${Date.now().toString(36)}${Math.random().toString(36).substring(2, 10)}`;

  const openAIResponse = {
    id: id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model: model,
    system_fingerprint: systemFingerprint,
    choices: [{
      index: 0,
      message: {
        role: "assistant",
        content: content
      } as { role: string; content: string; reasoning_content?: string },
      logprobs: null,
      finish_reason: finish_reason
    }],
    usage: usage
  };

  // If reasoning content is present, attach it to the message
  if (reasoning_content.trim()) {
    openAIResponse.choices[0].message.reasoning_content = reasoning_content;
  }

  return new Response(JSON.stringify(openAIResponse), {
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*"
    },
  });
}

// Main request handler
async function handler(req: Request): Promise<Response> {
  const url = new URL(req.url);

  // CORS headers
  const headers = {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
    "Access-Control-Allow-Headers": "Content-Type, Authorization",
  };

  // Handle OPTIONS requests (CORS preflight)
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers,
      status: 204
    });
  }

  try {
    // Model list endpoint
    if (url.pathname === "/v1/models") {
      return handleModelsRequest();
    }

    // Chat completions endpoint
    if (url.pathname === "/v1/chat/completions") {
      return handleChatCompletionsRequest(req);
    }

    // Unknown route
    return new Response(
      JSON.stringify({ error: "Not found" }),
      {
        status: 404,
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
      }
    );
  } catch (error) {
    console.error("Error processing request:", error);
    return new Response(
      JSON.stringify({ error: error instanceof Error ? error.message : "Internal server error" }),
      {
        status: 500,
        headers: {
          "Content-Type": "application/json",
          ...headers
        },
      }
    );
  }
}

// Start the server
console.log(`Starting server on port ${PORT}...`);
serve(handler, { port: PORT });
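
For reference, a minimal client sketch against the endpoints defined above (the file name, prompt text, and use of localhost are illustrative assumptions, not part of the original file):

// client_example.ts — hypothetical client for the proxy above.
// Start the proxy first:  deno run --allow-net main.ts
// Then run this client:   deno run --allow-net client_example.ts

// List the models the proxy advertises on /v1/models
const models = await fetch("http://localhost:8888/v1/models").then((r) => r.json());
console.log(models.data.map((m: { id: string }) => m.id));

// Non-streaming chat completion in the OpenAI-style request shape the proxy expects
const completion = await fetch("http://localhost:8888/v1/chat/completions", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    model: "qwen-qwq",
    messages: [{ role: "user", content: "Hello" }],
    stream: false,
  }),
}).then((r) => r.json());
console.log(completion.choices[0].message.content);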