feat: Implement message saving functionality with debouncing in Chat component
- app/api/chats/[id]/messages/route.ts +39 -0
- components/chat.tsx +32 -1
- lib/models.ts +4 -15
app/api/chats/[id]/messages/route.ts
ADDED
@@ -0,0 +1,39 @@
+import { NextResponse } from "next/server";
+import { saveMessages, convertToDBMessages } from "@/lib/chat-store";
+
+interface Params {
+  params: {
+    id: string;
+  };
+}
+
+export async function POST(request: Request, { params }: Params) {
+  try {
+    const userId = request.headers.get('x-user-id');
+
+    if (!userId) {
+      return NextResponse.json({ error: "User ID is required" }, { status: 400 });
+    }
+
+    const { id } = await params;
+    const { messages } = await request.json();
+
+    if (!messages || !Array.isArray(messages)) {
+      return NextResponse.json({ error: "Messages array is required" }, { status: 400 });
+    }
+
+    // Convert messages to DB format
+    const dbMessages = convertToDBMessages(messages, id);
+
+    // Save messages to database
+    await saveMessages({ messages: dbMessages });
+
+    return NextResponse.json({ success: true });
+  } catch (error) {
+    console.error("Error saving messages:", error);
+    return NextResponse.json(
+      { error: "Failed to save messages" },
+      { status: 500 }
+    );
+  }
+}
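
For reference, the route expects a JSON body containing a `messages` array and an `x-user-id` header, and returns `{ success: true }` on success. A minimal client-side sketch of calling it (the `saveChatMessages` helper name is illustrative only, not part of the commit):

// Hypothetical helper showing how the new endpoint is meant to be called.
async function saveChatMessages(chatId: string, userId: string, messages: unknown[]) {
  const res = await fetch(`/api/chats/${chatId}/messages`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-user-id": userId, // the route returns 400 if this header is missing
    },
    body: JSON.stringify({ messages }), // must be an array, otherwise the route returns 400
  });

  if (!res.ok) {
    throw new Error(`Failed to save messages: ${res.status}`);
  }
  return res.json() as Promise<{ success: boolean }>;
}
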
components/chat.tsx
CHANGED
@@ -134,7 +134,7 @@ export default function Chat() {
       toast.error(
         error.message.length > 0
           ? error.message
-          : "An error
+          : "An error occurred, please try again later.",
         { position: "top-center", richColors: true },
       );
     },
@@ -159,6 +159,37 @@
     }
   }, [chatId, generatedChatId, input, handleSubmit, router]);
 
+  // Save messages whenever they change (with debouncing)
+  useEffect(() => {
+    const effectiveChatId = chatId || generatedChatId;
+
+    if (!userId || !effectiveChatId || messages.length === 0) {
+      return;
+    }
+
+    // Debounce message saving to avoid too many requests
+    const timeoutId = setTimeout(async () => {
+      try {
+        const response = await fetch(`/api/chats/${effectiveChatId}/messages`, {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json',
+            'x-user-id': userId,
+          },
+          body: JSON.stringify({ messages }),
+        });
+
+        if (!response.ok) {
+          console.error('Failed to save messages:', await response.text());
+        }
+      } catch (error) {
+        console.error('Failed to save messages:', error);
+      }
+    }, 1000); // Save after 1 second of no changes
+
+    return () => clearTimeout(timeoutId);
+  }, [messages, chatId, generatedChatId, userId]);
+
   const isLoading = status === "streaming" || status === "submitted" || isLoadingChat;
 
   return (
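
The debounce here is the classic setTimeout-plus-cleanup pattern: every change to `messages` resets a one-second timer, so only the last state within that window is POSTed. As an illustration only (not part of the commit), the same pattern could be pulled into a reusable hook along these lines:

// Hypothetical refactor sketch: the debounce pattern as a reusable hook.
import { useEffect, useRef } from "react";

export function useDebouncedEffect(effect: () => void, deps: unknown[], delayMs: number) {
  // Keep the latest callback without re-arming the timer on every render.
  const savedEffect = useRef(effect);
  savedEffect.current = effect;

  useEffect(() => {
    // Re-arm the timer whenever a dependency changes; only the last
    // change within `delayMs` actually runs the effect.
    const timeoutId = setTimeout(() => savedEffect.current(), delayMs);
    return () => clearTimeout(timeoutId);
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [...deps, delayMs]);
}

Usage would mirror the effect above: useDebouncedEffect(() => { /* POST messages */ }, [messages, chatId, generatedChatId, userId], 1000).
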
lib/models.ts
CHANGED
@@ -10,22 +10,11 @@ export async function getModels(): Promise<string[]> {
   if (modelsCache) {
     return modelsCache;
   }
-
-
-
-    const modelIds = data.data
-      .filter((model: any) => model.id !== "THUDM/GLM-4.1V-9B-Thinking")
-      .slice(0, 5)
-      .map((model: any) => model.id);
-    modelsCache = modelIds;
-    return modelIds;
-  } catch (e) {
-    console.error(e);
-    return [];
-  }
+  // Hardcoded to use only SmolLM3-3B model
+  modelsCache = ["HuggingFaceTB/SmolLM3-3B"];
+  return modelsCache;
 }
 
 export async function getDefaultModel(): Promise<ModelID> {
-
-  return models[0] ?? "";
+  return "HuggingFaceTB/SmolLM3-3B";
 }