diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..46a8129df47f4217ab5a6b66582872341dfb8250 --- /dev/null +++ b/.env.example @@ -0,0 +1,3 @@ +XAI_API_KEY="" +OPENAI_API_KEY= +DATABASE_URL="postgresql://username:password@host:port/database" diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..df689922834cbd64bbf6a2052ee7935fb26728cf --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +*.png filter=lfs diff=lfs merge=lfs -text +*.ico filter=lfs diff=lfs merge=lfs -text +*.svg filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..b4fe939eace7bf9de4eb731b16200e1c8085db21 --- /dev/null +++ b/.gitignore @@ -0,0 +1,41 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..99773e210b7f75df0d603a38d7aa6aef45cdee6e --- /dev/null +++ b/README.md @@ -0,0 +1,138 @@ + +

<h1 align="center">Vercel x xAI Chatbot</h1>

<p align="center">
  An open-source AI chatbot app template built with Next.js, the AI SDK by Vercel, and xAI.
</p>

<p align="center">
  <a href="#features"><strong>Features</strong></a> ·
  <a href="#deploy-your-own"><strong>Deploy Your Own</strong></a> ·
  <a href="#running-locally"><strong>Running Locally</strong></a> ·
  <a href="#mcp-server-configuration"><strong>MCP Configuration</strong></a> ·
  <a href="#authors"><strong>Authors</strong></a>
</p>
## Features

- Streaming text responses powered by the [AI SDK by Vercel](https://sdk.vercel.ai/docs), allowing multiple AI providers to be used interchangeably with just a few lines of code.
- Built-in tool integration for extending AI capabilities (demonstrated with a weather tool example).
- Support for [Model Context Protocol (MCP)](https://modelcontextprotocol.io) servers to expand available tools.
- Multiple MCP transport types (SSE and stdio) for connecting to various tool providers.
- Reasoning model support.
- [shadcn/ui](https://ui.shadcn.com/) components for a modern, responsive UI powered by [Tailwind CSS](https://tailwindcss.com).
- Built with the latest [Next.js](https://nextjs.org) App Router.

## Deploy Your Own

You can deploy your own version to Vercel by clicking the button below:

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?project-name=Vercel+x+xAI+Chatbot&repository-name=ai-sdk-starter-xai&repository-url=https%3A%2F%2Fgithub.com%2Fvercel-labs%2Fai-sdk-starter-xai&demo-title=Vercel+x+xAI+Chatbot&demo-url=https%3A%2F%2Fai-sdk-starter-xai.labs.vercel.dev%2F&demo-description=A+simple+chatbot+application+built+with+Next.js+that+uses+xAI+via+the+AI+SDK+and+the+Vercel+Marketplace&products=[{%22type%22:%22integration%22,%22protocol%22:%22ai%22,%22productSlug%22:%22grok%22,%22integrationSlug%22:%22xai%22}])

## Running Locally

1. Clone the repository and install dependencies:

   ```bash
   npm install
   # or
   yarn install
   # or
   pnpm install
   ```

2. Install the [Vercel CLI](https://vercel.com/docs/cli):

   ```bash
   npm i -g vercel
   # or
   yarn global add vercel
   # or
   pnpm install -g vercel
   ```

   Once installed, link your local project to your Vercel project:

   ```bash
   vercel link
   ```

   After linking, pull your environment variables:

   ```bash
   vercel env pull
   ```

   This will create a `.env.local` file with all the necessary environment variables.

3. Run the development server:

   ```bash
   npm run dev
   # or
   yarn dev
   # or
   pnpm dev
   ```

4. Open [http://localhost:3000](http://localhost:3000) to view your new AI chatbot application.

## MCP Server Configuration

This application supports connecting to Model Context Protocol (MCP) servers to access their tools. You can add and manage MCP servers through the settings icon in the chat interface.

### Adding an MCP Server

1. Click the settings icon (⚙️) next to the model selector in the chat interface.
2. Enter a name for your MCP server.
3. Select the transport type:
   - **SSE (Server-Sent Events)**: for HTTP-based remote servers
   - **stdio (Standard I/O)**: for local servers running on the same machine

#### SSE Configuration

If you select SSE transport:

1. Enter the server URL (e.g., `https://mcp.example.com/token/sse`)
2. Click "Add Server"

#### stdio Configuration

If you select stdio transport:

1. Enter the command to execute (e.g., `node`)
2. Enter the command arguments (e.g., `src/mcp-server.js --port 3001`)
   - You can enter space-separated arguments or paste a JSON array
3. Click "Add Server"

Once a server has been added with either transport, click "Use" to activate it for the current chat session.

### Available MCP Servers

You can use any MCP-compatible server with this application. Here are some examples:

- [Composio](https://composio.dev) - provides search, code interpreter, and other tools
- Any local MCP server using stdio transport
- Any other third-party MCP-compatible server

### Installing Required Dependencies

If you're running the project locally and encounter issues with MCP integration, make sure to install the required dependencies:

```bash
npm run install-mcp
# or
yarn install-mcp
# or
pnpm run install-mcp
```

### Creating Your Own MCP Server

For information on creating your own MCP server, refer to the [Model Context Protocol documentation](https://modelcontextprotocol.io).
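As a quick illustration, here is a minimal sketch of a stdio server built with the official MCP TypeScript SDK. This assumes `@modelcontextprotocol/sdk` and `zod` are installed, and the `echo` tool is purely hypothetical:

```ts
// minimal-mcp-server.ts — an illustrative sketch, not part of this template
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { z } from "zod";

const server = new McpServer({ name: "echo-server", version: "1.0.0" });

// Register a single tool that echoes its input back.
server.tool("echo", { message: z.string() }, async ({ message }) => ({
  content: [{ type: "text", text: `Echo: ${message}` }],
}));

// stdio transport: the chat app spawns this process and talks over stdin/stdout.
await server.connect(new StdioServerTransport());
```

Compiled to JavaScript, this matches the shape of server the stdio configuration above expects: `node` as the command and the script path as the argument.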
## Authors

This repository is maintained by the [Vercel](https://vercel.com) team and community contributors.

Contributions are welcome! Feel free to open issues or submit pull requests to enhance functionality or fix bugs.
diff --git a/ai/providers.ts b/ai/providers.ts
new file mode 100644
index 0000000000000000000000000000000000000000..ac561f1cd4a8adaa8ea2160fb4802e18c0bc27ee
--- /dev/null
+++ b/ai/providers.ts
@@ -0,0 +1,59 @@
+import { xai } from "@ai-sdk/xai";
+import { openai } from "@ai-sdk/openai";
+import { customProvider } from "ai";
+
+export interface ModelInfo {
+  provider: string;
+  name: string;
+  description: string;
+  apiVersion: string;
+  capabilities: string[];
+}
+
+const languageModels = {
+  "grok-3": xai("grok-3-latest"),
+  "grok-3-mini": xai("grok-3-mini-fast-latest"),
+  "gpt-4.1-mini": openai("gpt-4.1-mini"),
+  "gpt-4.1-nano": openai("gpt-4.1-nano"),
+};
+
+export const modelDetails: Record<string, ModelInfo> = {
+  "grok-3": {
+    provider: "xAI",
+    name: "Grok-3",
+    description: "Latest version of xAI's flagship model with strong reasoning and coding capabilities.",
+    apiVersion: "grok-3-latest",
+    capabilities: ["Balance", "Efficient", "Agentic"]
+  },
+  "grok-3-mini": {
+    provider: "xAI",
+    name: "Grok-3 Mini",
+    description: "Fast, efficient and smaller xAI model with reasoning capabilities.",
+    apiVersion: "grok-3-mini-fast-latest",
+    capabilities: ["Fast", "Reasoning", "Efficient"]
+  },
+  "gpt-4.1-mini": {
+    provider: "OpenAI",
+    name: "GPT-4.1 Mini",
+    description: "Compact version of OpenAI's GPT-4.1 with a good balance of capabilities, including vision.",
+    apiVersion: "gpt-4.1-mini",
+    capabilities: ["Balance", "Creative", "Vision"]
+  },
+  "gpt-4.1-nano": {
+    provider: "OpenAI",
+    name: "GPT-4.1 Nano",
+    description: "Smallest and fastest GPT-4.1 variant designed for efficient rapid responses.",
+    apiVersion: "gpt-4.1-nano",
+    capabilities: ["Rapid", "Compact", "Efficient", "Vision"]
+  },
+};
+
+export const model = customProvider({
+  languageModels,
+});
+
+export type modelID = keyof typeof languageModels;
+
+export const MODELS = Object.keys(languageModels);
+
+export const defaultModel: modelID = "grok-3-mini";
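As a usage note: `customProvider` maps the friendly aliases above to concrete provider models, so callers resolve a model by alias at request time. A minimal sketch of how a caller consumes it (mirroring what `app/api/chat/route.ts` does later in this diff):

```ts
import { streamText } from "ai";
import { model, defaultModel } from "@/ai/providers";

// "grok-3-mini" (the default alias) resolves to xai("grok-3-mini-fast-latest").
const result = streamText({
  model: model.languageModel(defaultModel),
  prompt: "Say hello in one sentence.",
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}
```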
diff --git a/ai/tools.ts b/ai/tools.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1727d2d28d8ed9ea127ba83c05eef15598563d41
--- /dev/null
+++ b/ai/tools.ts
@@ -0,0 +1,11 @@
+import { VercelAIToolSet } from "composio-core";
+
+// Requires a Composio API key in the environment (typically COMPOSIO_API_KEY).
+const toolset = new VercelAIToolSet();
+
+export const composioTools = await toolset.getTools(
+  {
+    apps: [
+      "tavily",
+    ]
+  }
+);
\ No newline at end of file
diff --git a/app/actions.ts b/app/actions.ts
new file mode 100644
index 0000000000000000000000000000000000000000..487be07f9ee8bd872b35629040eb32d9dcfa3a5d
--- /dev/null
+++ b/app/actions.ts
@@ -0,0 +1,66 @@
+"use server";
+
+import { openai } from "@ai-sdk/openai";
+import { generateObject } from "ai";
+import { z } from "zod";
+
+// Helper to extract text content from a message regardless of format
+function getMessageText(message: any): string {
+  // Check if the message has parts (new format)
+  if (message.parts && Array.isArray(message.parts)) {
+    const textParts = message.parts.filter((p: any) => p.type === 'text' && p.text);
+    if (textParts.length > 0) {
+      return textParts.map((p: any) => p.text).join('\n');
+    }
+  }
+
+  // Fallback to content (old format)
+  if (typeof message.content === 'string') {
+    return message.content;
+  }
+
+  // If content is an array (potentially of parts), try to extract text
+  if (Array.isArray(message.content)) {
+    const textItems = message.content.filter((item: any) =>
+      typeof item === 'string' || (item.type === 'text' && item.text)
+    );
+
+    if (textItems.length > 0) {
+      return textItems.map((item: any) =>
+        typeof item === 'string' ? item : item.text
+      ).join('\n');
+    }
+  }
+
+  return '';
+}
+
+export async function generateTitle(messages: any[]) {
+  // Convert messages to a format that OpenAI can understand
+  const normalizedMessages = messages.map(msg => ({
+    role: msg.role,
+    content: getMessageText(msg)
+  }));
+
+  const { object } = await generateObject({
+    model: openai("gpt-4.1"),
+    schema: z.object({
+      title: z.string().min(1).max(100),
+    }),
+    system: `
+    You are a helpful assistant that generates titles for chat conversations.
+    The title should be a short description of the conversation.
+    The title should be no more than 30 characters.
+    The title should be unique and not generic.
+    `,
+    messages: [
+      ...normalizedMessages,
+      {
+        role: "user",
+        content: "Generate a title for the conversation.",
+      },
+    ],
+  });
+
+  return object.title;
+}
diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts
new file mode 100644
index 0000000000000000000000000000000000000000..af0bcfcb580fc1c553b8a78492a4d0065786cd4f
--- /dev/null
+++ b/app/api/chat/route.ts
@@ -0,0 +1,228 @@
+import { model, type modelID } from "@/ai/providers";
+import { composioTools } from "@/ai/tools";
+import { streamText, type UIMessage } from "ai";
+import { openai } from '@ai-sdk/openai';
+import { appendResponseMessages } from 'ai';
+import { saveChat, saveMessages, convertToDBMessages } from '@/lib/chat-store';
+import { nanoid } from 'nanoid';
+import { db } from '@/lib/db';
+import { messages, chats } from '@/lib/db/schema';
+import { eq, and } from 'drizzle-orm';
+
+import { experimental_createMCPClient as createMCPClient, MCPTransport } from 'ai';
+import { Experimental_StdioMCPTransport as StdioMCPTransport } from 'ai/mcp-stdio';
+
+// Allow streaming responses up to 30 seconds
+export const maxDuration = 30;
+
+interface KeyValuePair {
+  key: string;
+  value: string;
+}
+
+interface MCPServerConfig {
+  url: string;
+  type: 'sse' | 'stdio';
+  command?: string;
+  args?: string[];
+  env?: KeyValuePair[];
+  headers?: KeyValuePair[];
+}
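+// For illustration only: example configurations matching the shapes above.
+// (Server URLs, commands, and header values here are hypothetical.)
+//
+// const exampleServers: MCPServerConfig[] = [
+//   // A remote server reached over SSE, with an auth header
+//   {
+//     type: 'sse',
+//     url: 'https://mcp.example.com/token/sse',
+//     headers: [{ key: 'Authorization', value: 'Bearer <token>' }],
+//   },
+//   // A local server spawned over stdio (url is unused for stdio)
+//   {
+//     type: 'stdio',
+//     url: '',
+//     command: 'node',
+//     args: ['src/mcp-server.js'],
+//     env: [{ key: 'DEBUG', value: 'true' }],
+//   },
+// ];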
+export async function POST(req: Request) {
+  const {
+    messages,
+    chatId,
+    selectedModel,
+    userId,
+    mcpServers = [],
+  }: {
+    messages: UIMessage[];
+    chatId?: string;
+    selectedModel: modelID;
+    userId: string;
+    mcpServers?: MCPServerConfig[];
+  } = await req.json();
+
+  if (!userId) {
+    return new Response(
+      JSON.stringify({ error: "User ID is required" }),
+      { status: 400, headers: { "Content-Type": "application/json" } }
+    );
+  }
+
+  const id = chatId || nanoid();
+
+  // Check if chat already exists for the given ID
+  // If not, we'll create it in onFinish
+  let isNewChat = false;
+  if (chatId) {
+    try {
+      const existingChat = await db.query.chats.findFirst({
+        where: and(
+          eq(chats.id, chatId),
+          eq(chats.userId, userId)
+        )
+      });
+      isNewChat = !existingChat;
+    } catch (error) {
+      console.error("Error checking for existing chat:", error);
+      // Continue anyway, we'll create the chat in onFinish
+      isNewChat = true;
+    }
+  } else {
+    // No ID provided, definitely new
+    isNewChat = true;
+  }
+
+  // Initialize tools with Composio tools
+  let tools = { ...composioTools };
+  const mcpClients: any[] = [];
+
+  // Process each MCP server configuration
+  for (const mcpServer of mcpServers) {
+    try {
+      // Create appropriate transport based on type
+      let transport: MCPTransport | { type: 'sse', url: string, headers?: Record<string, string> };
+
+      if (mcpServer.type === 'sse') {
+        // Convert headers array to object for SSE transport
+        const headers: Record<string, string> = {};
+        if (mcpServer.headers && mcpServer.headers.length > 0) {
+          mcpServer.headers.forEach(header => {
+            if (header.key) headers[header.key] = header.value || '';
+          });
+        }
+
+        transport = {
+          type: 'sse' as const,
+          url: mcpServer.url,
+          headers: Object.keys(headers).length > 0 ? headers : undefined
+        };
+      } else if (mcpServer.type === 'stdio') {
+        // For stdio transport, we need command and args
+        if (!mcpServer.command || !mcpServer.args || mcpServer.args.length === 0) {
+          console.warn("Skipping stdio MCP server due to missing command or args");
+          continue;
+        }
+
+        // Convert env array to object for stdio transport
+        const env: Record<string, string> = {};
+        if (mcpServer.env && mcpServer.env.length > 0) {
+          mcpServer.env.forEach(envVar => {
+            if (envVar.key) env[envVar.key] = envVar.value || '';
+          });
+        }
+
+        transport = new StdioMCPTransport({
+          command: mcpServer.command,
+          args: mcpServer.args,
+          env: Object.keys(env).length > 0 ? env : undefined
+        });
+      } else {
+        console.warn(`Skipping MCP server with unsupported transport type: ${mcpServer.type}`);
+        continue;
+      }
+
+      const mcpClient = await createMCPClient({ transport });
+      mcpClients.push(mcpClient);
+
+      const mcptools = await mcpClient.tools();
+
+      console.log(`MCP tools from ${mcpServer.type} transport:`, Object.keys(mcptools));
+
+      // Add MCP tools to tools object
+      tools = { ...tools, ...mcptools };
+    } catch (error) {
+      console.error("Failed to initialize MCP client:", error);
+      // Continue with other servers instead of failing the entire request
+    }
+  }
+
+  // Register cleanup for all clients
+  if (mcpClients.length > 0) {
+    req.signal.addEventListener('abort', async () => {
+      for (const client of mcpClients) {
+        try {
+          await client.close();
+        } catch (error) {
+          console.error("Error closing MCP client:", error);
+        }
+      }
+    });
+  }
+
+  console.log("messages", messages);
+  console.log("parts", messages.map(m => m.parts));
+
+  // If there was an error setting up MCP clients but we at least have composio tools, continue
+  const result = streamText({
+    model: model.languageModel(selectedModel),
+    system: `You are a helpful assistant with access to a variety of tools.
+
+    The tools are very powerful, and you can use them to answer the user's question.
+    So choose the tool that is most relevant to the user's question.
+
+    You can use multiple tools in a single response.
+    Always respond after using the tools for a better user experience.
+    You can run multiple steps using all the tools!
+    Make sure to use the right tool to respond to the user's question.
+
+    ## Response Format
+    - Markdown is supported.
+    - Respond according to the tool's response.
+    - Use the tools to answer the user's question.
+    - If you don't know the answer, use the tools to find the answer or say you don't know.
+    `,
+    messages,
+    tools,
+    maxSteps: 20,
+    onError: (error) => {
+      console.error(JSON.stringify(error, null, 2));
+    },
+    async onFinish({ response, steps, toolCalls, toolResults }) {
+      console.log("onFinish", response.messages.map(m => {
+        return {
+          id: m.id,
+          content: JSON.stringify(m.content),
+          role: m.role,
+        }
+      }));
+      console.log("steps", steps);
+      console.log("toolCalls", toolCalls);
+      console.log("toolResults", toolResults);
+
+      // Combine messages for processing
+      const allMessages = appendResponseMessages({
+        messages,
+        responseMessages: response.messages,
+      });
+
+      // Step 1: Save chat with messages for proper title generation
+      await saveChat({
+        id,
+        userId,
+        messages: allMessages,
+        // No title specified - will be generated
+      });
+
+      // Step 2: Save all messages
+      const dbMessages = convertToDBMessages(allMessages, id);
+      await saveMessages({ messages: dbMessages });
+    }
+  });
+
+  result.consumeStream();
+  return result.toDataStreamResponse({
+    sendReasoning: true,
+    getErrorMessage: (error) => {
+      if (error instanceof Error) {
+        if (error.message.includes("Rate limit")) {
+          return "Rate limit exceeded. Please try again later.";
+        }
+      }
+      console.error(error);
+      return "An error occurred.";
+    },
+  });
+}
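In the app itself this endpoint is driven by the `useChat` hook (see `components/chat.tsx` below), but the request shape can be sketched directly; all values here are hypothetical:

```ts
// Sketch: calling the chat endpoint by hand (values are hypothetical).
const res = await fetch("/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    userId: "user-123",
    chatId: "abc123",          // omit to let the server generate one
    selectedModel: "grok-3-mini",
    mcpServers: [],            // MCPServerConfig[] as defined above
    messages: [
      {
        id: "1",
        role: "user",
        content: "What's the weather in Tokyo?",
        parts: [{ type: "text", text: "What's the weather in Tokyo?" }],
      },
    ],
  }),
});

// The body is an AI SDK data stream, not plain JSON; read it incrementally.
const reader = res.body!.getReader();
```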
diff --git a/app/api/chats/[id]/route.ts b/app/api/chats/[id]/route.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c518b3d72efcda8eb536031c428ec4c3bbe12bd4
--- /dev/null
+++ b/app/api/chats/[id]/route.ts
@@ -0,0 +1,56 @@
+import { NextResponse } from "next/server";
+import { getChatById, deleteChat } from "@/lib/chat-store";
+
+// Next.js 15 passes dynamic route params as a Promise, hence the await below.
+interface Params {
+  params: Promise<{
+    id: string;
+  }>;
+}
+
+export async function GET(request: Request, { params }: Params) {
+  try {
+    const userId = request.headers.get('x-user-id');
+
+    if (!userId) {
+      return NextResponse.json({ error: "User ID is required" }, { status: 400 });
+    }
+
+    const { id } = await params;
+    const chat = await getChatById(id, userId);
+
+    if (!chat) {
+      return NextResponse.json(
+        { error: "Chat not found" },
+        { status: 404 }
+      );
+    }
+
+    return NextResponse.json(chat);
+  } catch (error) {
+    console.error("Error fetching chat:", error);
+    return NextResponse.json(
+      { error: "Failed to fetch chat" },
+      { status: 500 }
+    );
+  }
+}
+
+export async function DELETE(request: Request, { params }: Params) {
+  try {
+    const userId = request.headers.get('x-user-id');
+
+    if (!userId) {
+      return NextResponse.json({ error: "User ID is required" }, { status: 400 });
+    }
+
+    const { id } = await params;
+    await deleteChat(id, userId);
+    return NextResponse.json({ success: true });
+  } catch (error) {
+    console.error("Error deleting chat:", error);
+    return NextResponse.json(
+      { error: "Failed to delete chat" },
+      { status: 500 }
+    );
+  }
+}
\ No newline at end of file
diff --git a/app/api/chats/route.ts b/app/api/chats/route.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2f4f6b518f6339f233944db77eb8ea08b495cf12
--- /dev/null
+++ b/app/api/chats/route.ts
@@ -0,0 +1,21 @@
+import { NextResponse } from "next/server";
+import { getChats } from "@/lib/chat-store";
+
+export async function GET(request: Request) {
+  try {
+    const userId = request.headers.get('x-user-id');
+
+    if (!userId) {
+      return NextResponse.json({ error: "User ID is required" }, { status: 400 });
+    }
+
+    const chats = await getChats(userId);
+    return NextResponse.json(chats);
+  } catch (error) {
+    console.error("Error fetching chats:", error);
+    return NextResponse.json(
+      { error: "Failed to fetch chats" },
+      { status: 500 }
+    );
+  }
+}
\ No newline at end of file
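Both routes identify the caller through the `x-user-id` header rather than a session. A client-side usage sketch (the `userId` value is hypothetical; the app derives it via `getUserId()`):

```ts
const userId = "user-123"; // hypothetical

// List this user's chats.
const chats = await fetch("/api/chats", {
  headers: { "x-user-id": userId },
}).then((res) => res.json());

// Delete a specific chat by id.
await fetch(`/api/chats/${chats[0].id}`, {
  method: "DELETE",
  headers: { "x-user-id": userId },
});
```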
chats:", error); + return NextResponse.json( + { error: "Failed to fetch chats" }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/app/chat/[id]/page.tsx b/app/chat/[id]/page.tsx new file mode 100644 index 0000000000000000000000000000000000000000..8581be0f0fe838cdb2d41a31f1801e34921ffb11 --- /dev/null +++ b/app/chat/[id]/page.tsx @@ -0,0 +1,53 @@ +"use client"; + +import Chat from "@/components/chat"; +import { getUserId } from "@/lib/user-id"; +import { useQueryClient } from "@tanstack/react-query"; +import { useParams } from "next/navigation"; +import { useEffect } from "react"; + +export default function ChatPage() { + const params = useParams(); + const chatId = params?.id as string; + const queryClient = useQueryClient(); + const userId = getUserId(); + + // Prefetch chat data + useEffect(() => { + async function prefetchChat() { + if (!chatId || !userId) return; + + // Check if data already exists in cache + const existingData = queryClient.getQueryData(['chat', chatId, userId]); + if (existingData) return; + + // Prefetch the data + await queryClient.prefetchQuery({ + queryKey: ['chat', chatId, userId] as const, + queryFn: async () => { + try { + const response = await fetch(`/api/chats/${chatId}`, { + headers: { + 'x-user-id': userId + } + }); + + if (!response.ok) { + throw new Error('Failed to load chat'); + } + + return response.json(); + } catch (error) { + console.error('Error prefetching chat:', error); + return null; + } + }, + staleTime: 1000 * 60 * 5, // 5 minutes + }); + } + + prefetchChat(); + }, [chatId, userId, queryClient]); + + return ; +} \ No newline at end of file diff --git a/app/favicon.ico b/app/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..3f28b1eda4bbfc46c86556c1b32136b141df03cb --- /dev/null +++ b/app/favicon.ico @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fae26f65fd15655032d2a57c8e05d4afb9d3119e9a96759b9240acdbe3bb8026 +size 15406 diff --git a/app/globals.css b/app/globals.css new file mode 100644 index 0000000000000000000000000000000000000000..5a569191d62fdaf4c3412829455e0059dbbc31d5 --- /dev/null +++ b/app/globals.css @@ -0,0 +1,191 @@ +@import "tailwindcss"; + +@plugin "tailwindcss-animate"; + +@custom-variant dark (&:is(.dark *)); + +@theme inline { + --color-background: var(--background); + --color-foreground: var(--foreground); + --font-sans: Montserrat, sans-serif; + --font-mono: Ubuntu Mono, monospace; + --color-sidebar-ring: var(--sidebar-ring); + --color-sidebar-border: var(--sidebar-border); + --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); + --color-sidebar-accent: var(--sidebar-accent); + --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); + --color-sidebar-primary: var(--sidebar-primary); + --color-sidebar-foreground: var(--sidebar-foreground); + --color-sidebar: var(--sidebar); + --color-chart-5: var(--chart-5); + --color-chart-4: var(--chart-4); + --color-chart-3: var(--chart-3); + --color-chart-2: var(--chart-2); + --color-chart-1: var(--chart-1); + --color-ring: var(--ring); + --color-input: var(--input); + --color-border: var(--border); + --color-destructive-foreground: var(--destructive-foreground); + --color-destructive: var(--destructive); + --color-accent-foreground: var(--accent-foreground); + --color-accent: var(--accent); + --color-muted-foreground: var(--muted-foreground); + --color-muted: var(--muted); + --color-secondary-foreground: var(--secondary-foreground); + --color-secondary: var(--secondary); + 
--color-primary-foreground: var(--primary-foreground); + --color-primary: var(--primary); + --color-popover-foreground: var(--popover-foreground); + --color-popover: var(--popover); + --color-card-foreground: var(--card-foreground); + --color-card: var(--card); + --radius-sm: calc(var(--radius) - 4px); + --radius-md: calc(var(--radius) - 2px); + --radius-lg: var(--radius); + --radius-xl: calc(var(--radius) + 4px); + --font-serif: Merriweather, serif; + --radius: 0.625rem; + --tracking-tighter: calc(var(--tracking-normal) - 0.05em); + --tracking-tight: calc(var(--tracking-normal) - 0.025em); + --tracking-wide: calc(var(--tracking-normal) + 0.025em); + --tracking-wider: calc(var(--tracking-normal) + 0.05em); + --tracking-widest: calc(var(--tracking-normal) + 0.1em); + --tracking-normal: var(--tracking-normal); + --shadow-2xl: var(--shadow-2xl); + --shadow-xl: var(--shadow-xl); + --shadow-lg: var(--shadow-lg); + --shadow-md: var(--shadow-md); + --shadow: var(--shadow); + --shadow-sm: var(--shadow-sm); + --shadow-xs: var(--shadow-xs); + --shadow-2xs: var(--shadow-2xs); + --spacing: var(--spacing); + --letter-spacing: var(--letter-spacing); + --shadow-offset-y: var(--shadow-offset-y); + --shadow-offset-x: var(--shadow-offset-x); + --shadow-spread: var(--shadow-spread); + --shadow-blur: var(--shadow-blur); + --shadow-opacity: var(--shadow-opacity); + --color-shadow-color: var(--shadow-color); +} + +:root { + --background: oklch(0.99 0.01 56.32); + --foreground: oklch(0.34 0.01 2.77); + --card: oklch(1.00 0 0); + --card-foreground: oklch(0.34 0.01 2.77); + --popover: oklch(1.00 0 0); + --popover-foreground: oklch(0.34 0.01 2.77); + --primary: oklch(0.74 0.16 34.71); + --primary-foreground: oklch(1.00 0 0); + --secondary: oklch(0.96 0.02 28.90); + --secondary-foreground: oklch(0.56 0.13 32.74); + --muted: oklch(0.97 0.02 39.40); + --muted-foreground: oklch(0.49 0.05 26.45); + --accent: oklch(0.83 0.11 58.00); + --accent-foreground: oklch(0.34 0.01 2.77); + --destructive: oklch(0.61 0.21 22.24); + --destructive-foreground: oklch(1.00 0 0); + --border: oklch(0.93 0.04 38.69); + --input: oklch(0.93 0.04 38.69); + --ring: oklch(0.74 0.16 34.71); + --chart-1: oklch(0.74 0.16 34.71); + --chart-2: oklch(0.83 0.11 58.00); + --chart-3: oklch(0.88 0.08 54.93); + --chart-4: oklch(0.82 0.11 40.89); + --chart-5: oklch(0.64 0.13 32.07); + --radius: 0.625rem; + --sidebar: oklch(0.97 0.02 39.40); + --sidebar-foreground: oklch(0.34 0.01 2.77); + --sidebar-primary: oklch(0.74 0.16 34.71); + --sidebar-primary-foreground: oklch(1.00 0 0); + --sidebar-accent: oklch(0.83 0.11 58.00); + --sidebar-accent-foreground: oklch(0.34 0.01 2.77); + --sidebar-border: oklch(0.93 0.04 38.69); + --sidebar-ring: oklch(0.74 0.16 34.71); + --font-sans: Montserrat, sans-serif; + --font-serif: Merriweather, serif; + --font-mono: Ubuntu Mono, monospace; + --shadow-color: hsl(0 0% 0%); + --shadow-opacity: 0.09; + --shadow-blur: 12px; + --shadow-spread: -3px; + --shadow-offset-x: 0px; + --shadow-offset-y: 6px; + --letter-spacing: 0em; + --spacing: 0.25rem; + --shadow-2xs: 0px 6px 12px -3px hsl(0 0% 0% / 0.04); + --shadow-xs: 0px 6px 12px -3px hsl(0 0% 0% / 0.04); + --shadow-sm: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 1px 2px -4px hsl(0 0% 0% / 0.09); + --shadow: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 1px 2px -4px hsl(0 0% 0% / 0.09); + --shadow-md: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 2px 4px -4px hsl(0 0% 0% / 0.09); + --shadow-lg: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 4px 6px -4px hsl(0 0% 0% / 0.09); + --shadow-xl: 0px 
6px 12px -3px hsl(0 0% 0% / 0.09), 0px 8px 10px -4px hsl(0 0% 0% / 0.09); + --shadow-2xl: 0px 6px 12px -3px hsl(0 0% 0% / 0.22); + --tracking-normal: 0em; +} + +.dark { + --background: oklch(0.26 0.02 352.40); + --foreground: oklch(0.94 0.01 51.32); + --card: oklch(0.32 0.02 341.45); + --card-foreground: oklch(0.94 0.01 51.32); + --popover: oklch(0.32 0.02 341.45); + --popover-foreground: oklch(0.94 0.01 51.32); + --primary: oklch(0.74 0.16 34.71); + --primary-foreground: oklch(1.00 0 0); + --secondary: oklch(0.36 0.02 342.27); + --secondary-foreground: oklch(0.94 0.01 51.32); + --muted: oklch(0.32 0.02 341.45); + --muted-foreground: oklch(0.84 0.02 52.63); + --accent: oklch(0.83 0.11 58.00); + --accent-foreground: oklch(0.26 0.02 352.40); + --destructive: oklch(0.61 0.21 22.24); + --destructive-foreground: oklch(1.00 0 0); + --border: oklch(0.36 0.02 342.27); + --input: oklch(0.36 0.02 342.27); + --ring: oklch(0.74 0.16 34.71); + --chart-1: oklch(0.74 0.16 34.71); + --chart-2: oklch(0.83 0.11 58.00); + --chart-3: oklch(0.88 0.08 54.93); + --chart-4: oklch(0.82 0.11 40.89); + --chart-5: oklch(0.64 0.13 32.07); + --sidebar: oklch(0.26 0.02 352.40); + --sidebar-foreground: oklch(0.94 0.01 51.32); + --sidebar-primary: oklch(0.74 0.16 34.71); + --sidebar-primary-foreground: oklch(1.00 0 0); + --sidebar-accent: oklch(0.83 0.11 58.00); + --sidebar-accent-foreground: oklch(0.26 0.02 352.40); + --sidebar-border: oklch(0.36 0.02 342.27); + --sidebar-ring: oklch(0.74 0.16 34.71); + --radius: 0.625rem; + --font-sans: Montserrat, sans-serif; + --font-serif: Merriweather, serif; + --font-mono: Ubuntu Mono, monospace; + --shadow-color: hsl(0 0% 0%); + --shadow-opacity: 0.09; + --shadow-blur: 12px; + --shadow-spread: -3px; + --shadow-offset-x: 0px; + --shadow-offset-y: 6px; + --letter-spacing: 0em; + --spacing: 0.25rem; + --shadow-2xs: 0px 6px 12px -3px hsl(0 0% 0% / 0.04); + --shadow-xs: 0px 6px 12px -3px hsl(0 0% 0% / 0.04); + --shadow-sm: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 1px 2px -4px hsl(0 0% 0% / 0.09); + --shadow: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 1px 2px -4px hsl(0 0% 0% / 0.09); + --shadow-md: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 2px 4px -4px hsl(0 0% 0% / 0.09); + --shadow-lg: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 4px 6px -4px hsl(0 0% 0% / 0.09); + --shadow-xl: 0px 6px 12px -3px hsl(0 0% 0% / 0.09), 0px 8px 10px -4px hsl(0 0% 0% / 0.09); + --shadow-2xl: 0px 6px 12px -3px hsl(0 0% 0% / 0.22); +} + +@layer base { + * { + @apply border-border outline-ring/50; + } + body { + @apply bg-background text-foreground; + letter-spacing: var(--tracking-normal); + } +} \ No newline at end of file diff --git a/app/layout.tsx b/app/layout.tsx new file mode 100644 index 0000000000000000000000000000000000000000..2327d08c77acf43bbb4cab791ad4c5c4cdd0a447 --- /dev/null +++ b/app/layout.tsx @@ -0,0 +1,44 @@ +import type { Metadata } from "next"; +import { Inter } from "next/font/google"; +import { ChatSidebar } from "@/components/chat-sidebar"; +import { SidebarTrigger } from "@/components/ui/sidebar"; +import { Menu } from "lucide-react"; +import { Providers } from "./providers"; +import "./globals.css"; + +const inter = Inter({ subsets: ["latin"] }); + +export const metadata: Metadata = { + title: "Scira MCP Chat", + description: "Scira MCP Chat is a chat interface for interacting with MCP servers.", +}; + +export default function RootLayout({ + children, +}: Readonly<{ + children: React.ReactNode; +}>) { + return ( + + + +
+    <html lang="en" suppressHydrationWarning>
+      <body className={inter.className}>
+        <Providers>
+          <div className="flex h-dvh w-full">
+            <ChatSidebar />
+            <main className="flex-1 flex flex-col relative">
+              <div className="absolute top-4 left-4 z-50">
+                <SidebarTrigger>
+                  <Menu className="h-4 w-4" />
+                </SidebarTrigger>
+              </div>
+              <div className="flex-1 flex justify-center">
+                {children}
+              </div>
+            </main>
+          </div>
+        </Providers>
+      </body>
+    </html>
+  );
+}
diff --git a/app/page.tsx b/app/page.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..4303bf4486e446b5903eb278fb60bedb898cadd4
--- /dev/null
+++ b/app/page.tsx
@@ -0,0 +1,5 @@
+import Chat from "@/components/chat";
+
+export default function Page() {
+  return <Chat />;
+}
diff --git a/app/providers.tsx b/app/providers.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..09850250e3989e7b63b81ba867aef36d9af8ffb6
--- /dev/null
+++ b/app/providers.tsx
@@ -0,0 +1,35 @@
+"use client";
+
+import { ReactNode } from "react";
+import { ThemeProvider } from "@/components/theme-provider";
+import { SidebarProvider } from "@/components/ui/sidebar";
+import { Toaster } from "sonner";
+import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
+
+// Create a client
+const queryClient = new QueryClient({
+  defaultOptions: {
+    queries: {
+      staleTime: 1000 * 60 * 5, // 5 minutes
+      refetchOnWindowFocus: true,
+    },
+  },
+});
+
+export function Providers({ children }: { children: ReactNode }) {
+  return (
+    <QueryClientProvider client={queryClient}>
+      <ThemeProvider
+        attribute="class"
+        defaultTheme="system"
+        enableSystem
+        disableTransitionOnChange
+      >
+        <SidebarProvider>
+          {children}
+          <Toaster position="top-center" richColors />
+        </SidebarProvider>
+      </ThemeProvider>
+    </QueryClientProvider>
+  );
+}
\ No newline at end of file
diff --git a/components.json b/components.json
new file mode 100644
index 0000000000000000000000000000000000000000..5a3c7506d04ec327bedd759a52abc127316cc1d1
--- /dev/null
+++ b/components.json
@@ -0,0 +1,21 @@
+{
+  "$schema": "https://ui.shadcn.com/schema.json",
+  "style": "new-york",
+  "rsc": true,
+  "tsx": true,
+  "tailwind": {
+    "config": "",
+    "css": "app/globals.css",
+    "baseColor": "zinc",
+    "cssVariables": true,
+    "prefix": ""
+  },
+  "aliases": {
+    "components": "@/components",
+    "utils": "@/lib/utils",
+    "ui": "@/components/ui",
+    "lib": "@/lib",
+    "hooks": "@/hooks"
+  },
+  "iconLibrary": "lucide"
+}
\ No newline at end of file
diff --git a/components/chat-sidebar.tsx b/components/chat-sidebar.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..a9100f2a58d7e0c3d29dfeede6846e9157de1b70
--- /dev/null
+++ b/components/chat-sidebar.tsx
@@ -0,0 +1,272 @@
+"use client";
+
+import { useState, useEffect } from "react";
+import { useRouter, usePathname } from "next/navigation";
+import { MessageSquare, PlusCircle, Trash2, ServerIcon, Settings, Loader2, Sparkles } from "lucide-react";
+import {
+  Sidebar,
+  SidebarContent,
+  SidebarFooter,
+  SidebarGroup,
+  SidebarGroupContent,
+  SidebarGroupLabel,
+  SidebarHeader,
+  SidebarMenu,
+  SidebarMenuButton,
+  SidebarMenuItem,
+  SidebarMenuBadge,
+  SidebarSeparator,
+  useSidebar
+} from "@/components/ui/sidebar";
+import { Separator } from "@/components/ui/separator";
+import { Button } from "@/components/ui/button";
+import { Badge } from "@/components/ui/badge";
+import { toast } from "sonner";
+import { type Chat } from "@/lib/db/schema";
+import Image from "next/image";
+import { MCPServerManager, type MCPServer } from "./mcp-server-manager";
+import { ThemeToggle } from "./theme-toggle";
+import { useTheme } from "next-themes";
+import { getUserId } from "@/lib/user-id";
+import { useLocalStorage } from "@/lib/hooks/use-local-storage";
+import { STORAGE_KEYS } from "@/lib/constants";
+import { useChats } from "@/lib/hooks/use-chats";
+import { cn } from "@/lib/utils";
+import Link from "next/link";
+
+export function ChatSidebar() {
+  const router = useRouter();
+  const pathname = usePathname();
+  const [userId, setUserId] = useState('');
+  const [mcpServers, setMcpServers] = useLocalStorage<MCPServer[]>(STORAGE_KEYS.MCP_SERVERS, []);
+  const [selectedMcpServers, setSelectedMcpServers] =
+    useLocalStorage<string[]>(STORAGE_KEYS.SELECTED_MCP_SERVERS, []);
+  const [mcpSettingsOpen, setMcpSettingsOpen] = useState(false);
+  const { state } = useSidebar();
+  const isCollapsed = state === "collapsed";
+
+  // Initialize userId
+  useEffect(() => {
+    setUserId(getUserId());
+  }, []);
+
+  // Use TanStack Query to fetch chats
+  const { chats, isLoading, deleteChat, refreshChats } = useChats(userId);
+
+  // Start a new chat
+  const handleNewChat = () => {
+    router.push('/');
+  };
+
+  // Delete a chat
+  const handleDeleteChat = async (chatId: string, e: React.MouseEvent) => {
+    e.stopPropagation();
+    e.preventDefault();
+
+    deleteChat(chatId);
+
+    // If we're currently on the deleted chat's page, navigate to home
+    if (pathname === `/chat/${chatId}`) {
+      router.push('/');
+    }
+  };
+
+  // Get active MCP servers status
+  const activeServersCount = selectedMcpServers.length;
+  const multipleServersActive = activeServersCount > 1;
+
+  // Show loading state if user ID is not yet initialized
+  if (!userId) {
+    return null; // Or a loading spinner
+  }
+
+  return (
+    <Sidebar collapsible="icon">
+      <SidebarHeader>
+        <div className="flex items-center gap-2 px-2 py-1">
+          <Image src="/scira.png" alt="Scira Logo" width={24} height={24} className="rounded-md" />
+          {!isCollapsed && (
+            <span className="font-semibold text-lg">MCP</span>
+          )}
+        </div>
+      </SidebarHeader>
+
+      <SidebarContent>
+        <SidebarGroup>
+          <SidebarGroupLabel>Chats</SidebarGroupLabel>
+          <SidebarGroupContent>
+            <SidebarMenu>
+              {isLoading ? (
+                <div className="flex items-center gap-2 px-2 py-2 text-muted-foreground">
+                  <Loader2 className="h-4 w-4 animate-spin" />
+                  {!isCollapsed && (
+                    <span className="text-sm">Loading...</span>
+                  )}
+                </div>
+              ) : chats.length === 0 ? (
+                <div className="px-2 py-2">
+                  {isCollapsed ? (
+                    <div className="flex justify-center">
+                      <MessageSquare className="h-4 w-4 text-muted-foreground" />
+                    </div>
+                  ) : (
+                    <div className="flex flex-col items-center gap-1 text-center">
+                      <MessageSquare className="h-4 w-4 text-muted-foreground" />
+                      <span className="text-sm text-muted-foreground">No chats yet</span>
+                      <span className="text-xs text-muted-foreground">
+                        Start a new conversation below
+                      </span>
+                    </div>
+                  )}
+                </div>
+              ) : (
+                chats.map((chat) => (
+                  <SidebarMenuItem key={chat.id}>
+                    <SidebarMenuButton
+                      asChild
+                      isActive={pathname === `/chat/${chat.id}`}
+                      tooltip={isCollapsed ? chat.title : undefined}
+                    >
+                      <Link href={`/chat/${chat.id}`}>
+                        <MessageSquare className="h-4 w-4" />
+                        {!isCollapsed && (
+                          <span title={chat.title}>
+                            {chat.title.length > 18 ? `${chat.title.slice(0, 18)}...` : chat.title}
+                          </span>
+                        )}
+                      </Link>
+                    </SidebarMenuButton>
+                    {!isCollapsed && (
+                      <Button
+                        variant="ghost"
+                        size="icon"
+                        className="absolute right-1 top-1.5 h-6 w-6 text-muted-foreground hover:text-destructive"
+                        onClick={(e) => handleDeleteChat(chat.id, e)}
+                        title="Delete chat"
+                      >
+                        <Trash2 className="h-3 w-3" />
+                      </Button>
+                    )}
+                  </SidebarMenuItem>
+                ))
+              )}
+            </SidebarMenu>
+          </SidebarGroupContent>
+        </SidebarGroup>
+
+        <SidebarSeparator />
+
+        <SidebarGroup>
+          <SidebarGroupLabel>MCP Servers</SidebarGroupLabel>
+          <SidebarGroupContent>
+            <SidebarMenu>
+              <SidebarMenuItem>
+                <SidebarMenuButton
+                  onClick={() => setMcpSettingsOpen(true)}
+                  className={cn(
+                    "w-full flex items-center gap-2 transition-all",
+                    "hover:bg-secondary/50 active:bg-secondary/70"
+                  )}
+                  tooltip={isCollapsed ? "MCP Servers" : undefined}
+                >
+                  <ServerIcon className={cn(
+                    "h-4 w-4",
+                    activeServersCount > 0 ? "text-green-500" : "text-muted-foreground"
+                  )} />
+                  {!isCollapsed && (
+                    <span>MCP Servers</span>
+                  )}
+                  {activeServersCount > 0 && !isCollapsed ? (
+                    <Badge variant="secondary" className="ml-auto">
+                      {activeServersCount}
+                    </Badge>
+                  ) : activeServersCount > 0 && isCollapsed ? (
+                    <SidebarMenuBadge>{activeServersCount}</SidebarMenuBadge>
+                  ) : null}
+                </SidebarMenuButton>
+              </SidebarMenuItem>
+            </SidebarMenu>
+          </SidebarGroupContent>
+        </SidebarGroup>
+      </SidebarContent>
+
+      <SidebarFooter>
+        <div className="flex flex-col gap-2 p-2">
+          <Button onClick={handleNewChat} className="w-full gap-2" title="New Chat">
+            <PlusCircle className="h-4 w-4" />
+            {!isCollapsed && <span>New Chat</span>}
+          </Button>
+          <ThemeToggle />
+        </div>
+        <MCPServerManager
+          servers={mcpServers}
+          onServersChange={setMcpServers}
+          selectedServers={selectedMcpServers}
+          onSelectedServersChange={setSelectedMcpServers}
+          open={mcpSettingsOpen}
+          onOpenChange={setMcpSettingsOpen}
+        />
+      </SidebarFooter>
+    </Sidebar>
+  );
+}
\ No newline at end of file
diff --git a/components/chat.tsx b/components/chat.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..72eb1ee1a4ce386351165b812297991c9c936c53
--- /dev/null
+++ b/components/chat.tsx
@@ -0,0 +1,227 @@
+"use client";
+
+import { defaultModel, type modelID } from "@/ai/providers";
+import { Message, useChat } from "@ai-sdk/react";
+import { useState, useEffect, useMemo, useCallback } from "react";
+import { Textarea } from "./textarea";
+import { ProjectOverview } from "./project-overview";
+import { Messages } from "./messages";
+import { toast } from "sonner";
+import { useRouter, useParams } from "next/navigation";
+import { getUserId } from "@/lib/user-id";
+import { useLocalStorageValue, useLocalStorage } from "@/lib/hooks/use-local-storage";
+import { STORAGE_KEYS } from "@/lib/constants";
+import { useQuery, useQueryClient } from "@tanstack/react-query";
+import { convertToUIMessages } from "@/lib/chat-store";
+import { type Message as DBMessage } from "@/lib/db/schema";
+import { nanoid } from "nanoid";
+
+// Define types for MCP server
+interface KeyValuePair {
+  key: string;
+  value: string;
+}
+
+interface MCPServer {
+  id: string;
+  name: string;
+  url: string;
+  type: 'sse' | 'stdio';
+  command?: string;
+  args?: string[];
+  env?: KeyValuePair[];
+  headers?: KeyValuePair[];
+}
+
+interface ChatData {
+  id: string;
+  messages: DBMessage[];
+  createdAt: string;
+  updatedAt: string;
+}
+
+export default function Chat() {
+  const router = useRouter();
+  const params = useParams();
+  const chatId = params?.id as string | undefined;
+  const queryClient = useQueryClient();
+
+  const [selectedModel, setSelectedModel] = useLocalStorage<modelID>("selectedModel", defaultModel);
+  const [userId, setUserId] = useState('');
+  const [generatedChatId, setGeneratedChatId] = useState('');
+
+  // Get MCP server data from localStorage via our custom hooks
+  const mcpServers = useLocalStorageValue<MCPServer[]>(STORAGE_KEYS.MCP_SERVERS, []);
+  const selectedMcpServers = useLocalStorageValue<string[]>(STORAGE_KEYS.SELECTED_MCP_SERVERS, []);
+
+  // Initialize userId
+  useEffect(() => {
+    setUserId(getUserId());
+  }, []);
+
+  // Generate a chat ID if needed
+  useEffect(() => {
+    if (!chatId) {
+      setGeneratedChatId(nanoid());
+    }
+  }, [chatId]);
+
+  // Use React Query to fetch chat history
+  const { data: chatData, isLoading: isLoadingChat } = useQuery({
+    queryKey: ['chat', chatId, userId] as const,
+    queryFn: async ({ queryKey }) => {
+      const [_, chatId, userId] = queryKey;
+      if (!chatId || !userId) return null;
+
+      try {
+        const response = await fetch(`/api/chats/${chatId}`, {
+          headers: {
+            'x-user-id': userId
+          }
+        });
+
+        if (!response.ok) {
+          throw new Error('Failed to load chat');
+        }
+
+        const data = await response.json();
+        return data as ChatData;
+      } catch (error) {
+        console.error('Error loading chat history:', error);
+        toast.error('Failed to load chat history');
+        throw error;
+      }
+    },
+    enabled: !!chatId && !!userId,
+    retry: 1,
+    staleTime: 1000 * 60 * 5, // 5 minutes
+    refetchOnWindowFocus: false
+  });
+
+  // Memoize MCP server configuration for API
+  const mcpServersForApi = useMemo(() => {
+    if (!selectedMcpServers.length) return [];
+
+    return selectedMcpServers
+      .map(id => mcpServers.find(server => server.id === id))
+      .filter((server): server is MCPServer => Boolean(server))
+      .map(server => ({
+        type: server.type,
+        url: server.url,
+        command: server.command,
+        args: server.args,
+        env: server.env,
+        headers: server.headers
+      }));
+  }, [mcpServers, selectedMcpServers]);
+
+  // Prepare initial messages from query data
+  const initialMessages = useMemo(() => {
+    if (!chatData || !chatData.messages || chatData.messages.length === 0) {
+      return [];
+    }
+
+    // Convert DB messages to UI format, then ensure it matches the Message type from @ai-sdk/react
+    const uiMessages = convertToUIMessages(chatData.messages);
+    return uiMessages.map(msg => ({
+      id: msg.id,
+      role: msg.role as Message['role'], // Ensure role is properly typed
+      content: msg.content,
+      parts: msg.parts,
+    } as Message));
+  }, [chatData]);
+
+  const { messages, input, handleInputChange, handleSubmit, status, stop } =
+    useChat({
+      id: chatId || generatedChatId, // Use generated ID if no chatId in URL
+      initialMessages,
+      maxSteps: 20,
+      body: {
+        selectedModel,
+        mcpServers: mcpServersForApi,
+        chatId: chatId || generatedChatId, // Use generated ID if no chatId in URL
+        userId,
+      },
+      experimental_throttle: 500,
+      onFinish: () => {
+        // Invalidate the chats query to refresh the sidebar
+        if (userId) {
+          queryClient.invalidateQueries({ queryKey: ['chats', userId] });
+        }
+      },
+      onError: (error) => {
+        toast.error(
+          error.message.length > 0
+            ? error.message
+            : "An error occurred, please try again later.",
+          { position: "top-center", richColors: true },
+        );
+      },
+    });
+
+  // Custom submit handler
+  const handleFormSubmit = useCallback((e: React.FormEvent) => {
+    e.preventDefault();
+
+    if (!chatId && generatedChatId && input.trim()) {
+      // If this is a new conversation, redirect to the chat page with the generated ID
+      const effectiveChatId = generatedChatId;
+
+      // Submit the form
+      handleSubmit(e);
+
+      // Redirect to the chat page with the generated ID
+      router.push(`/chat/${effectiveChatId}`);
+    } else {
+      // Normal submission for existing chats
+      handleSubmit(e);
+    }
+  }, [chatId, generatedChatId, input, handleSubmit, router]);
+
+  const isLoading = status === "streaming" || status === "submitted" || isLoadingChat;
+
+  return (
+      {messages.length === 0 && !isLoadingChat ? (
+        <ProjectOverview />