updated implementation
app/commit.json
CHANGED
@@ -1 +1 @@
-{ "commit": "
+{ "commit": "070e911be17e1e1f3994220c3ed89b0060c67bd2" }
app/components/chat/AssistantMessage.tsx
CHANGED
@@ -1,15 +1,22 @@
 import { memo } from 'react';
 import { Markdown } from './Markdown';
-import {
+import type { JSONValue } from 'ai';
 
 interface AssistantMessageProps {
   content: string;
+  annotations?: JSONValue[];
 }
 
-export const AssistantMessage = memo(({ content }: AssistantMessageProps) => {
-  const
-
-
+export const AssistantMessage = memo(({ content, annotations }: AssistantMessageProps) => {
+  const filteredAnnotations = (annotations?.filter(
+    (annotation: JSONValue) => annotation && typeof annotation === 'object' && Object.keys(annotation).includes('type'),
+  ) || []) as { type: string; value: any }[];
+
+  const usage: {
+    completionTokens: number;
+    promptTokens: number;
+    totalTokens: number;
+  } = filteredAnnotations.find((annotation) => annotation.type === 'usage')?.value;
 
   return (
     <div className="overflow-hidden w-full">
@@ -18,7 +25,7 @@ export const AssistantMessage = memo(({ content }: AssistantMessageProps) => {
           Tokens: {usage.totalTokens} (prompt: {usage.promptTokens}, completion: {usage.completionTokens})
         </div>
       )}
-      <Markdown html>{
+      <Markdown html>{content}</Markdown>
     </div>
   );
 });
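Note on the new lookup: the component now reads token usage from the message annotations the AI SDK attaches to assistant messages, and the `as { type: string; value: any }[]` cast plus the non-optional `usage` type hide that the value can be `undefined` when no annotation was sent (the conditional that closes at `)}` in the JSX appears to account for this). A minimal sketch of the same lookup with a narrower type, assuming the `usage` annotation shape written by the server below; `UsageAnnotation` and `findUsage` are illustrative helpers, not part of this change:

```ts
import type { JSONValue } from 'ai';

// Assumed shape of the 'usage' annotation written by the server (see api.chat.ts below).
type UsageAnnotation = {
  type: 'usage';
  value: {
    completionTokens: number;
    promptTokens: number;
    totalTokens: number;
  };
};

// Hypothetical helper: pull the usage value out of message annotations,
// returning undefined when the server did not send one.
export function findUsage(annotations?: JSONValue[]): UsageAnnotation['value'] | undefined {
  const match = annotations?.find(
    (annotation) =>
      annotation !== null &&
      typeof annotation === 'object' &&
      !Array.isArray(annotation) &&
      (annotation as { type?: unknown }).type === 'usage',
  );

  return match ? (match as unknown as UsageAnnotation).value : undefined;
}
```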
app/components/chat/Messages.client.tsx
CHANGED
@@ -65,7 +65,11 @@ export const Messages = React.forwardRef<HTMLDivElement, MessagesProps>((props:
                 </div>
               )}
               <div className="grid grid-col-1 w-full">
-                {isUserMessage ?
+                {isUserMessage ? (
+                  <UserMessage content={content} />
+                ) : (
+                  <AssistantMessage content={content} annotations={message.annotations} />
+                )}
               </div>
               {!isUserMessage && (
                 <div className="flex gap-2 flex-col lg:flex-row">
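For context on where `message.annotations` comes from: the AI SDK's UI message type carries an optional `annotations` array that the SDK fills from annotation parts in the response stream. A simplified sketch of the relevant shape (illustrative only, not the library's exact exported type):

```ts
import type { JSONValue } from 'ai';

// Simplified view of the AI SDK chat message consumed in Messages.client.tsx.
// The real Message type has more fields; only the ones used here are shown.
interface UIMessageSketch {
  id: string;
  role: 'system' | 'user' | 'assistant' | 'data';
  content: string;
  annotations?: JSONValue[]; // populated from writeMessageAnnotation() parts on the stream
}
```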
app/lib/.server/llm/switchable-stream.ts
CHANGED
@@ -1,5 +1,5 @@
 export default class SwitchableStream extends TransformStream {
-  _controller: TransformStreamDefaultController | null = null;
+  private _controller: TransformStreamDefaultController | null = null;
   private _currentReader: ReadableStreamDefaultReader | null = null;
   private _switches = 0;
 
app/routes/api.chat.ts
CHANGED
@@ -1,4 +1,5 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
+import { createDataStream } from 'ai';
 import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
 import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
 import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
@@ -53,26 +54,30 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
     onFinish: async ({ text: content, finishReason, usage }) => {
       console.log('usage', usage);
 
-      if (usage
+      if (usage) {
         cumulativeUsage.completionTokens += usage.completionTokens || 0;
         cumulativeUsage.promptTokens += usage.promptTokens || 0;
         cumulativeUsage.totalTokens += usage.totalTokens || 0;
-
-        // Send usage info in message metadata for assistant messages
-        const usageMetadata = `0:"[Usage: ${JSON.stringify({
-          completionTokens: cumulativeUsage.completionTokens,
-          promptTokens: cumulativeUsage.promptTokens,
-          totalTokens: cumulativeUsage.totalTokens,
-        })}\n]"`;
-
-        console.log(usageMetadata);
-
-        const encodedData = new TextEncoder().encode(usageMetadata);
-        stream._controller.enqueue(encodedData);
       }
 
       if (finishReason !== 'length') {
-        return stream
+        return stream
+          .switchSource(
+            createDataStream({
+              async execute(dataStream) {
+                dataStream.writeMessageAnnotation({
+                  type: 'usage',
+                  value: {
+                    completionTokens: cumulativeUsage.completionTokens,
+                    promptTokens: cumulativeUsage.promptTokens,
+                    totalTokens: cumulativeUsage.totalTokens,
+                  },
+                });
+              },
+              onError: (error: any) => `Custom error: ${error.message}`,
+            }),
+          )
+          .then(() => stream.close());
       }
 
       if (stream.switches >= MAX_RESPONSE_SEGMENTS) {
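With this change the usage numbers travel as a proper message annotation instead of a hand-built `0:"[Usage: ...]"` text chunk enqueued directly on the stream controller, which is also what allows `_controller` to become private in SwitchableStream. Annotations written with `dataStream.writeMessageAnnotation()` are exposed by the AI SDK's chat client as `message.annotations`, the field AssistantMessage reads above. A rough consumption sketch under that assumption; `latestTokenUsage` is illustrative and not part of this change:

```ts
import type { JSONValue, Message } from 'ai';

// Illustrative helper: given the chat messages from the AI SDK client, return
// the cumulative usage reported by the last assistant message, or undefined
// if the stream has not finished or no annotation was sent.
export function latestTokenUsage(messages: Message[]) {
  const lastAssistant = [...messages].reverse().find((message) => message.role === 'assistant');

  const usageAnnotation = lastAssistant?.annotations?.find(
    (annotation: JSONValue) =>
      annotation !== null &&
      typeof annotation === 'object' &&
      !Array.isArray(annotation) &&
      (annotation as { type?: unknown }).type === 'usage',
  );

  return usageAnnotation
    ? (usageAnnotation as { type: string; value: { completionTokens: number; promptTokens: number; totalTokens: number } })
        .value
    : undefined;
}
```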
app/utils/constants.ts
CHANGED
@@ -9,7 +9,6 @@ export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
 export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
 export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
 export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
-export const USAGE_REGEX = /\[Usage: ({.*?})\]/; // Keep this regex for assistant messages
 export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
 export const PROMPT_COOKIE_KEY = 'cachedPrompt';
 