import { d as private_env } from './shared-server-49TKSBDM.js';
import { c as redirect, b as base } from './index-JNnR1J8_.js';
import { L as LlamaCppService } from './LlamaCppService-lwZ2ZT0u.js';
import 'fs';
import 'path';
import 'node:dns';
const POST = async ({ locals, request }) => {
  const body = await request.json();
  // Allow the upstream llama.cpp request to be aborted if the client disconnects.
  const abortController = new AbortController();
  const llmService = new LlamaCppService(private_env.LLM_API_URL);
  const llmGenerator = await llmService.conversation(body.history, { abortController });
  // Stream generated tokens back to the client as they arrive.
  const stream = new ReadableStream({
    async start(controller) {
      try {
        for await (const output of await llmGenerator({ history: body.history })) {
          controller.enqueue(output.token.text);
        }
      } catch (error) {
        if (error.name === "AbortError") {
          console.log("Request was aborted during LLMServer prediction.");
        } else {
          console.error("Error during LLMServer prediction:", error);
        }
      }
      controller.close();
    },
    // Invoked when the client cancels the response stream; propagate the abort upstream.
    cancel() {
      console.log("ReadableStream canceled and aborted");
      abortController.abort();
    }
  });
  return new Response(stream, {
    headers: {
      "content-type": "text/event-stream"
    }
  });
};
// This endpoint only accepts POST; redirect GET requests back to the app root.
const GET = async () => {
  throw redirect(302, `${base}/`);
};
export { GET, POST };
//# sourceMappingURL=_server.ts-ME-xAf93.js.map
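
For context, here is a minimal sketch of how a client might consume this endpoint. The "/conversation" route path and the shape of the history payload are assumptions, not confirmed by this file. Note that the handler enqueues raw token text without `data:` framing, so despite the `text/event-stream` content type the body is read here as plain text chunks rather than parsed as server-sent events. Aborting the fetch fires the stream's cancel() hook above, which in turn aborts the upstream llama.cpp request.

// Hypothetical client-side consumer; the "/conversation" path is an assumption.
async function streamTokens(history, onToken, signal) {
  const res = await fetch("/conversation", {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify({ history }),
    signal // aborting this signal triggers the server-side cancel() hook
  });
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`);
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk is raw token text, not a `data:`-framed SSE event.
    onToken(decoder.decode(value, { stream: true }));
  }
}

A caller could pass the signal from its own AbortController and invoke abort() to stop generation mid-stream.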