import { openai } from "@/lib/openai-client";
import { hf } from "@/lib/hf-client";
import { getModels, type ModelID } from "@/lib/models";
import { saveChat } from "@/lib/chat-store";
import { nanoid } from "nanoid";
import { db } from "@/lib/db";
import { chats } from "@/lib/db/schema";
import { eq, and } from "drizzle-orm";
import { initializeMCPClients, type MCPServerConfig } from "@/lib/mcp-client";
import { generateTitle } from "@/app/actions";
import { createOpenAIStream } from "@/lib/openai-stream";
import type {
  ChatCompletionTool,
  ChatCompletionMessageParam,
} from "openai/resources";
export const runtime = "nodejs";
// Allow streaming responses up to 120 seconds
export const maxDuration = 120;
export const dynamic = "force-dynamic";
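// Convert the MCP tool map (tool name -> schema) into the `tools` payload
// expected by the OpenAI chat.completions API.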
function mcpToolsToOpenAITools(
  tools: Record<string, any>
): ChatCompletionTool[] {
  return Object.entries(tools).map(
    ([name, schema]): ChatCompletionTool => ({
      type: "function",
      function: { name, parameters: schema.parameters },
    })
  );
}
export async function POST(req: Request) {
  const {
    messages,
    chatId,
    selectedModel,
    userId,
    mcpServers = [],
  }: {
    messages: ChatCompletionMessageParam[];
    chatId?: string;
    selectedModel: ModelID;
    userId: string;
    mcpServers?: MCPServerConfig[];
  } = await req.json();
  if (!userId) {
    return new Response(JSON.stringify({ error: "User ID is required" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }
  const id = chatId || nanoid();
  // Check if chat already exists for the given ID
  // If not, create it now
  let isNewChat = false;
  if (chatId) {
    try {
      const existingChat = await db.query.chats.findFirst({
        where: and(eq(chats.id, chatId), eq(chats.userId, userId)),
      });
      isNewChat = !existingChat;
    } catch (error) {
      console.error("Error checking for existing chat:", error);
      isNewChat = true;
    }
  } else {
    // No ID provided, definitely new
    isNewChat = true;
  }
  // If it's a new chat, save it immediately
  if (isNewChat && messages.length > 0) {
    try {
      // Generate a title based on the first user message
      const userMessage = messages.find((m) => m.role === "user");
      let title = "New Chat";
      if (userMessage && typeof userMessage.content === "string") {
        try {
          // generateTitle expects a UIMessage[], so adapt the message shape
          title = await generateTitle([
            { role: "user", content: userMessage.content, id: "temp-id" },
          ]);
        } catch (error) {
          console.error("Error generating title:", error);
        }
      }
      // Save the chat immediately so it appears in the sidebar
      await saveChat({
        id,
        userId,
        title,
        messages: [], // Messages will be saved by the client
      });
    } catch (error) {
      console.error("Error saving new chat:", error);
    }
  }
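  // Connect to any configured MCP servers and gather their tools; the request's
  // abort signal is passed along so the clients can shut down if the request is cancelled.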
  const { tools, cleanup } = await initializeMCPClients(mcpServers, req.signal);
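  // Route Hugging Face-hosted models through the HF client, everything else
  // through the OpenAI client.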
  const hfModels = await getModels();
  const client = hfModels.includes(selectedModel) ? hf : openai;
  const openAITools = mcpToolsToOpenAITools(tools);
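  // Start a streaming completion; tools are only attached when at least one
  // MCP tool is available.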
  const completion = await client.chat.completions.create(
    {
      model: selectedModel,
      stream: true,
      messages,
      ...(openAITools.length > 0 && {
        tools: openAITools,
        tool_choice: "auto",
      }),
    },
    { signal: req.signal }
  );
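  // Adapt the OpenAI completion stream to a response body stream and tear down
  // the MCP clients once streaming finishes.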
  const stream = createOpenAIStream(completion, {
    onFinal() {
      cleanup();
    },
  });
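  // Return the SSE response; X-Chat-ID exposes the id so the client can pick up
  // a newly created chat.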
  return new Response(stream, {
    headers: {
      "Content-Type": "text/event-stream",
      "X-Chat-ID": id,
    },
  });
}