Connor Fogarty
committed on
feat: add support for message continuation (#1)
packages/bolt/app/lib/.server/llm/constants.ts
CHANGED
@@ -1,2 +1,5 @@
 // see https://docs.anthropic.com/en/docs/about-claude/models
 export const MAX_TOKENS = 8192;
+
+// limits the number of model responses that can be returned in a single request
+export const MAX_RESPONSE_SEGMENTS = 2;
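Taken together, these constants bound the output of one chat request: each model response is limited to MAX_TOKENS (8192) output tokens, and at most MAX_RESPONSE_SEGMENTS (2) response segments are stitched together per request, so a single request can return roughly 2 × 8192 = 16384 tokens of assistant text at most.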
packages/bolt/app/lib/.server/llm/prompts.ts
CHANGED
@@ -1,4 +1,5 @@
 import { WORK_DIR } from '../../../utils/constants';
+import { stripIndents } from '../../../utils/stripIndent';
 
 export const getSystemPrompt = (cwd: string = WORK_DIR) => `
 You are Bolt, an expert AI assistant and exceptional senior software developer with vast knowledge across multiple programming languages, frameworks, and best practices.
@@ -198,3 +199,8 @@ Here are some examples of correct usage of artifacts:
 </example>
 </examples>
 `;
+
+export const CONTINUE_PROMPT = stripIndents`
+  Continue your prior response. IMPORTANT: Immediately begin from where you left off without any interruptions.
+  Do not repeat any content, including artifact and action tags.
+`;
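The stripIndents helper imported above lives in the repo's utils (packages/bolt/app/utils/stripIndent) and is not part of this diff; CONTINUE_PROMPT only relies on it to drop the leading indentation and surrounding blank lines from the template literal. A minimal sketch of what such a tagged-template helper could look like, given as an assumption for illustration rather than the repo's actual implementation:

// Hypothetical stripIndents sketch -- the real util in packages/bolt/app/utils/stripIndent may differ.
export function stripIndents(strings: TemplateStringsArray, ...values: unknown[]): string {
  // Re-assemble the template literal, interleaving literal chunks with interpolated values.
  const raw = strings.reduce((acc, chunk, i) => acc + chunk + (i < values.length ? String(values[i]) : ''), '');

  // Drop the leading whitespace of every line and the blank lines around the text.
  return raw
    .split('\n')
    .map((line) => line.trimStart())
    .join('\n')
    .trim();
}

With a helper along these lines, CONTINUE_PROMPT evaluates to two plain instruction lines with no leading spaces, keeping the continuation request compact.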
packages/bolt/app/lib/.server/llm/switchable-stream.ts
ADDED
@@ -0,0 +1,65 @@
+export default class SwitchableStream extends TransformStream {
+  private _controller: TransformStreamDefaultController | null = null;
+  private _currentReader: ReadableStreamDefaultReader | null = null;
+  private _switches = 0;
+
+  constructor() {
+    let controllerRef: TransformStreamDefaultController | undefined;
+
+    super({
+      start(controller) {
+        controllerRef = controller;
+      },
+    });
+
+    if (controllerRef === undefined) {
+      throw new Error('Controller not properly initialized');
+    }
+
+    this._controller = controllerRef;
+  }
+
+  async switchSource(newStream: ReadableStream) {
+    if (this._currentReader) {
+      await this._currentReader.cancel();
+    }
+
+    this._currentReader = newStream.getReader();
+
+    this._pumpStream();
+
+    this._switches++;
+  }
+
+  private async _pumpStream() {
+    if (!this._currentReader || !this._controller) {
+      throw new Error('Stream is not properly initialized');
+    }
+
+    try {
+      while (true) {
+        const { done, value } = await this._currentReader.read();
+
+        if (done) {
+          break;
+        }
+
+        this._controller.enqueue(value);
+      }
+    } catch (error) {
+      this._controller.error(error);
+    }
+  }
+
+  close() {
+    if (this._currentReader) {
+      this._currentReader.cancel();
+    }
+
+    this._controller?.terminate();
+  }
+
+  get switches() {
+    return this._switches;
+  }
+}
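SwitchableStream is a TransformStream whose readable side stays open while the source feeding it is swapped out underneath: switchSource cancels the current reader, pumps the new stream into the shared controller, and counts the swap in switches. Below is a small standalone sketch of how it can be driven; the names streamOf and demo are hypothetical, and in the commit itself the switching is driven by the onFinish callback in api.chat.ts rather than by timers.

import SwitchableStream from './switchable-stream';

// Hypothetical helper: wraps a few string chunks in a ReadableStream of bytes.
function streamOf(chunks: string[]): ReadableStream<Uint8Array> {
  const encoder = new TextEncoder();
  return new ReadableStream({
    start(controller) {
      for (const chunk of chunks) {
        controller.enqueue(encoder.encode(chunk));
      }
      controller.close();
    },
  });
}

async function demo() {
  const stream = new SwitchableStream();

  // Consume the combined readable side concurrently with the source switches.
  const output = (async () => {
    const reader = stream.readable.getReader();
    const decoder = new TextDecoder();
    let text = '';

    while (true) {
      const { done, value } = await reader.read();

      if (done) {
        break;
      }

      text += decoder.decode(value);
    }

    return text;
  })();

  await stream.switchSource(streamOf(['first ', 'segment, ']));

  // switchSource does not wait for the current source to drain, so the caller
  // should only switch once the previous source is known to be finished
  // (api.chat.ts gets that signal from onFinish); a short delay stands in here.
  await new Promise((resolve) => setTimeout(resolve, 10));
  await stream.switchSource(streamOf(['second segment']));
  await new Promise((resolve) => setTimeout(resolve, 10));

  stream.close();

  console.log(await output); // "first segment, second segment"
}

demo();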
packages/bolt/app/routes/api.chat.ts
CHANGED
@@ -1,12 +1,40 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-import {
+import { MAX_RESPONSE_SEGMENTS } from '../lib/.server/llm/constants';
+import { CONTINUE_PROMPT } from '../lib/.server/llm/prompts';
+import { streamText, type Messages, type StreamingOptions } from '../lib/.server/llm/stream-text';
+import SwitchableStream from '../lib/.server/llm/switchable-stream';
+import { StreamingTextResponse } from 'ai';
 
 export async function action({ context, request }: ActionFunctionArgs) {
   const { messages } = await request.json<{ messages: Messages }>();
+  const stream = new SwitchableStream();
 
   try {
-    const
-
+    const options: StreamingOptions = {
+      toolChoice: 'none',
+      onFinish: async ({ text: content, finishReason }) => {
+        if (finishReason !== 'length') {
+          return stream.close();
+        }
+
+        if (stream.switches >= MAX_RESPONSE_SEGMENTS) {
+          throw Error('Cannot continue message: maximum segments reached');
+        }
+
+        messages.push({ role: 'assistant', content });
+        messages.push({ role: 'user', content: CONTINUE_PROMPT });
+
+        const result = await streamText(messages, context.cloudflare.env, options);
+
+        return stream.switchSource(result.toAIStream());
+      },
+    };
+
+    const result = await streamText(messages, context.cloudflare.env, options);
+
+    stream.switchSource(result.toAIStream());
+
+    return new StreamingTextResponse(stream.readable);
   } catch (error) {
     console.log(error);
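In short: the route now wraps the model output in a SwitchableStream, and when a response is cut off at the token limit (finishReason === 'length'), the onFinish handler appends the partial assistant message plus CONTINUE_PROMPT to the history, requests another completion, and splices its stream into the same HTTP response via switchSource, up to MAX_RESPONSE_SEGMENTS segments.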