File size: 3,713 Bytes
dff2be9
8cfdcec
 
dff2be9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e87a324
5012205
dff2be9
 
 
 
 
 
 
 
 
 
 
 
5012205
 
 
 
 
 
 
 
 
dff2be9
5012205
dff2be9
5012205
 
 
 
 
dff2be9
 
 
 
5012205
 
 
 
 
67c6e06
55947a0
5012205
 
 
dff2be9
5012205
 
 
 
 
 
 
 
 
 
 
67c6e06
 
5012205
67c6e06
dff2be9
 
0c91a71
dff2be9
67c6e06
dff2be9
 
 
 
67c6e06
 
7984c85
5012205
0c91a71
67c6e06
 
 
 
 
dff2be9
67c6e06
5012205
67c6e06
5012205
 
 
67c6e06
5012205
8cfdcec
 
 
 
 
 
dff2be9
 
 
 
8cfdcec
 
 
 
5012205
dff2be9
 
e87a324
dff2be9
 
 
 
5012205
 
dff2be9
67c6e06
dff2be9
 
5012205
 
dff2be9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
import { openai } from "@/lib/openai-client";
import { hf } from "@/lib/hf-client";
import { getModels, type ModelID } from "@/lib/models";
import { saveChat } from "@/lib/chat-store";
import { nanoid } from "nanoid";
import { db } from "@/lib/db";
import { chats } from "@/lib/db/schema";
import { eq, and } from "drizzle-orm";
import { initializeMCPClients, type MCPServerConfig } from "@/lib/mcp-client";
import { generateTitle } from "@/app/actions";
import { createOpenAIStream } from "@/lib/openai-stream";
import type {
  ChatCompletionTool,
  ChatCompletionMessageParam,
} from "openai/resources";

// Run on the Node.js runtime (not Edge) — the db and MCP clients below require it.
export const runtime = "nodejs";

// Allow streaming responses up to 120 seconds
export const maxDuration = 120;

// Opt out of route caching: every chat request must be handled fresh.
export const dynamic = "force-dynamic";

/**
 * Convert the MCP tool registry into OpenAI's `tools` wire format.
 *
 * Each `[name, schema]` entry becomes a `function`-type tool whose
 * `parameters` field is taken verbatim from the MCP schema.
 */
function mcpToolsToOpenAITools(
  tools: Record<string, any>
): ChatCompletionTool[] {
  const converted: ChatCompletionTool[] = [];
  for (const [toolName, toolSchema] of Object.entries(tools)) {
    converted.push({
      type: "function",
      function: { name: toolName, parameters: toolSchema.parameters },
    });
  }
  return converted;
}

/**
 * Streaming chat-completion endpoint.
 *
 * Body: `{ messages, chatId?, selectedModel, userId, mcpServers? }`.
 * Persists a new chat row up-front (so it appears in the sidebar), wires up
 * any configured MCP servers as OpenAI tools, and streams the completion
 * back as `text/event-stream` with the chat id echoed in `X-Chat-ID`.
 *
 * Returns 400 when `userId` is missing, 500 (JSON) when the completion
 * request fails.
 */
export async function POST(req: Request) {
  const {
    messages,
    chatId,
    selectedModel,
    userId,
    mcpServers = [],
  }: {
    messages: ChatCompletionMessageParam[];
    chatId?: string;
    selectedModel: ModelID;
    userId: string;
    mcpServers?: MCPServerConfig[];
  } = await req.json();

  if (!userId) {
    return new Response(JSON.stringify({ error: "User ID is required" }), {
      status: 400,
      headers: { "Content-Type": "application/json" },
    });
  }

  const id = chatId || nanoid();

  // Create the chat row now (best-effort) so it shows up in the sidebar
  // before the stream finishes.
  await ensureChatPersisted(id, chatId, userId, messages);

  const { tools, cleanup } = await initializeMCPClients(mcpServers, req.signal);

  try {
    const hfModels = await getModels();
    const client = hfModels.includes(selectedModel) ? hf : openai;

    const openAITools = mcpToolsToOpenAITools(tools);

    const completion = await client.chat.completions.create(
      {
        model: selectedModel,
        stream: true,
        messages,
        // Only attach tools when MCP actually provided some; an empty
        // `tools` array is rejected by some providers.
        ...(openAITools.length > 0 && {
          tools: openAITools,
          tool_choice: "auto",
        }),
      },
      { signal: req.signal }
    );

    const stream = createOpenAIStream(completion, {
      onFinal() {
        // Tear down MCP clients once the stream completes.
        cleanup();
      },
    });

    return new Response(stream, {
      headers: {
        "Content-Type": "text/event-stream",
        "X-Chat-ID": id,
      },
    });
  } catch (error: unknown) {
    // BUG FIX: previously a failure here (model/network error, client
    // abort) skipped `cleanup()` entirely — the MCP client connections
    // leaked and the caller got an unhandled rejection instead of JSON.
    cleanup();
    console.error("Error creating chat completion:", error);
    return new Response(
      JSON.stringify({ error: "Failed to generate a response" }),
      { status: 500, headers: { "Content-Type": "application/json" } }
    );
  }
}

/**
 * Persist a new chat row when `chatId` is absent or not found in the db.
 *
 * Best-effort: lookup/title/save failures are logged, never thrown — a
 * persistence hiccup must not block the completion stream.
 */
async function ensureChatPersisted(
  id: string,
  chatId: string | undefined,
  userId: string,
  messages: ChatCompletionMessageParam[]
): Promise<void> {
  // Check if chat already exists for the given ID; if not, create it now.
  let isNewChat = false;
  if (chatId) {
    try {
      const existingChat = await db.query.chats.findFirst({
        where: and(eq(chats.id, chatId), eq(chats.userId, userId)),
      });
      isNewChat = !existingChat;
    } catch (error: unknown) {
      // If the lookup fails, assume the chat is new so it is not lost.
      console.error("Error checking for existing chat:", error);
      isNewChat = true;
    }
  } else {
    // No ID provided, definitely new.
    isNewChat = true;
  }

  if (!isNewChat || messages.length === 0) return;

  try {
    // Generate a title from the first user message, falling back to a
    // generic label if there is none or title generation fails.
    const userMessage = messages.find((m) => m.role === "user");
    let title = "New Chat";

    if (userMessage && typeof userMessage.content === "string") {
      try {
        // The generateTitle function expects a UIMessage[], let's adapt.
        title = await generateTitle([
          { role: "user", content: userMessage.content, id: "temp-id" },
        ]);
      } catch (error: unknown) {
        console.error("Error generating title:", error);
      }
    }

    // Save the chat immediately so it appears in the sidebar.
    await saveChat({
      id,
      userId,
      title,
      messages: [], // Messages will be saved by the client
    });
  } catch (error: unknown) {
    console.error("Error saving new chat:", error);
  }
}