Merge branch 'stackblitz-labs:main' into FEAT_BoltDYI_CHAT_FIX

Files changed:
- app/components/chat/AssistantMessage.tsx  +75 -7
- app/components/chat/BaseChat.tsx  +16 -2
- app/components/chat/Chat.client.tsx  +43 -29
- app/components/chat/Markdown.tsx  +0 -2
- app/components/chat/ProgressCompilation.tsx  +111 -0
- app/components/ui/Popover.tsx  +12 -3
- app/lib/.server/llm/create-summary.ts  +68 -9
- app/lib/.server/llm/select-context.ts  +5 -4
- app/lib/.server/llm/stream-text.ts  +12 -9
- app/lib/.server/llm/utils.ts  +2 -2
- app/routes/api.chat.ts  +59 -15
- app/routes/api.llmcall.ts  +6 -0
- app/types/context.ts  +3 -1
- app/utils/selectStarterTemplate.ts  +1 -0
app/components/chat/AssistantMessage.tsx
CHANGED
@@ -1,23 +1,55 @@
 import { memo } from 'react';
 import { Markdown } from './Markdown';
 import type { JSONValue } from 'ai';
-import type { ProgressAnnotation } from '~/types/context';
 import Popover from '~/components/ui/Popover';
+import { workbenchStore } from '~/lib/stores/workbench';
+import { WORK_DIR } from '~/utils/constants';

 interface AssistantMessageProps {
   content: string;
   annotations?: JSONValue[];
 }

+function openArtifactInWorkbench(filePath: string) {
+  filePath = normalizedFilePath(filePath);
+
+  if (workbenchStore.currentView.get() !== 'code') {
+    workbenchStore.currentView.set('code');
+  }
+
+  workbenchStore.setSelectedFile(`${WORK_DIR}/${filePath}`);
+}
+
+function normalizedFilePath(path: string) {
+  let normalizedPath = path;
+
+  if (normalizedPath.startsWith(WORK_DIR)) {
+    normalizedPath = path.replace(WORK_DIR, '');
+  }
+
+  if (normalizedPath.startsWith('/')) {
+    normalizedPath = normalizedPath.slice(1);
+  }
+
+  return normalizedPath;
+}
+
 export const AssistantMessage = memo(({ content, annotations }: AssistantMessageProps) => {
   const filteredAnnotations = (annotations?.filter(
     (annotation: JSONValue) => annotation && typeof annotation === 'object' && Object.keys(annotation).includes('type'),
   ) || []) as { type: string; value: any } & { [key: string]: any }[];

+  let chatSummary: string | undefined = undefined;
+
+  if (filteredAnnotations.find((annotation) => annotation.type === 'chatSummary')) {
+    chatSummary = filteredAnnotations.find((annotation) => annotation.type === 'chatSummary')?.summary;
+  }
+
+  let codeContext: string[] | undefined = undefined;
+
+  if (filteredAnnotations.find((annotation) => annotation.type === 'codeContext')) {
+    codeContext = filteredAnnotations.find((annotation) => annotation.type === 'codeContext')?.files;
+  }

   const usage: {
     completionTokens: number;
@@ -29,8 +61,44 @@
       <div className="overflow-hidden w-full">
         <>
           <div className=" flex gap-2 items-center text-sm text-bolt-elements-textSecondary mb-2">
-            <Popover trigger={<div className="i-ph:info" />}>
+            {(codeContext || chatSummary) && (
+              <Popover side="right" align="start" trigger={<div className="i-ph:info" />}>
+                {chatSummary && (
+                  <div className="max-w-chat">
+                    <div className="summary max-h-96 flex flex-col">
+                      <h2 className="border border-bolt-elements-borderColor rounded-md p4">Summary</h2>
+                      <div style={{ zoom: 0.7 }} className="overflow-y-auto m4">
+                        <Markdown>{chatSummary}</Markdown>
+                      </div>
+                    </div>
+                    {codeContext && (
+                      <div className="code-context flex flex-col p4 border border-bolt-elements-borderColor rounded-md">
+                        <h2>Context</h2>
+                        <div className="flex gap-4 mt-4 bolt" style={{ zoom: 0.6 }}>
+                          {codeContext.map((x) => {
+                            const normalized = normalizedFilePath(x);
+                            return (
+                              <>
+                                <code
+                                  className="bg-bolt-elements-artifacts-inlineCode-background text-bolt-elements-artifacts-inlineCode-text px-1.5 py-1 rounded-md text-bolt-elements-item-contentAccent hover:underline cursor-pointer"
+                                  onClick={(e) => {
+                                    e.preventDefault();
+                                    e.stopPropagation();
+                                    openArtifactInWorkbench(normalized);
+                                  }}
+                                >
+                                  {normalized}
+                                </code>
+                              </>
+                            );
+                          })}
+                        </div>
+                      </div>
+                    )}
+                  </div>
+                )}
+                <div className="context"></div>
+              </Popover>
             )}
             {usage && (
               <div>
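For reference, the component above pulls `chatSummary` and `codeContext` out of the message annotations. A minimal sketch of the annotation payloads it expects, with field names inferred from the `?.summary` and `?.files` accesses (illustrative values, not an authoritative type definition):

```ts
import type { JSONValue } from 'ai';

// Hypothetical annotation payloads, shaped after the accesses above
// (`?.summary` on a 'chatSummary' annotation, `?.files` on 'codeContext').
const annotations: JSONValue[] = [
  { type: 'chatSummary', summary: '# Project Overview\n- **Project**: demo app' },
  { type: 'codeContext', files: ['/home/project/src/App.tsx', 'src/main.tsx'] },
];
```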
app/components/chat/BaseChat.tsx
CHANGED
@@ -2,7 +2,7 @@
  * @ts-nocheck
  * Preventing TS checks with files presented in the video for a better presentation.
  */
-import type { Message } from 'ai';
+import type { JSONValue, Message } from 'ai';
 import React, { type RefCallback, useEffect, useState } from 'react';
 import { ClientOnly } from 'remix-utils/client-only';
 import { Menu } from '~/components/sidebar/Menu.client';
@@ -32,6 +32,8 @@
 import type { ActionAlert } from '~/types/actions';
 import ChatAlert from './ChatAlert';
 import type { ModelInfo } from '~/lib/modules/llm/types';
+import ProgressCompilation from './ProgressCompilation';
+import type { ProgressAnnotation } from '~/types/context';

 const TEXTAREA_MIN_HEIGHT = 76;
@@ -64,6 +66,7 @@
   setImageDataList?: (dataList: string[]) => void;
   actionAlert?: ActionAlert;
   clearAlert?: () => void;
+  data?: JSONValue[] | undefined;
 }

 export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
@@ -97,6 +100,7 @@
       messages,
       actionAlert,
       clearAlert,
+      data,
     },
     ref,
   ) => {
@@ -108,7 +112,15 @@
     const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
     const [transcript, setTranscript] = useState('');
     const [isModelLoading, setIsModelLoading] = useState<string | undefined>('all');
-
+    const [progressAnnotations, setProgressAnnotations] = useState<ProgressAnnotation[]>([]);
+    useEffect(() => {
+      if (data) {
+        const progressList = data.filter(
+          (x) => typeof x === 'object' && (x as any).type === 'progress',
+        ) as ProgressAnnotation[];
+        setProgressAnnotations(progressList);
+      }
+    }, [data]);
     useEffect(() => {
       console.log(transcript);
     }, [transcript]);
@@ -307,6 +319,7 @@
             className={classNames('pt-6 px-2 sm:px-6', {
               'h-full flex flex-col': chatStarted,
             })}
+            ref={scrollRef}
           >
             <ClientOnly>
               {() => {
@@ -337,6 +350,7 @@
                   />
                 )}
               </div>
+              {progressAnnotations && <ProgressCompilation data={progressAnnotations} />}
               <div
                 className={classNames(
                   'bg-bolt-elements-background-depth-2 p-3 rounded-lg border border-bolt-elements-borderColor relative w-full max-w-chat mx-auto z-prompt',
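For context, `data` is the raw stream-data array exposed by `useChat`; the new effect keeps only entries tagged `type: 'progress'` and hands them to `ProgressCompilation`. A rough sketch of what that array might hold mid-stream (values are made up):

```ts
import type { JSONValue } from 'ai';

// Hypothetical contents of the `data` prop mid-stream; only the
// `type: 'progress'` entries survive the filter in the effect above.
const data: JSONValue[] = [
  { type: 'progress', label: 'summary', status: 'in-progress', order: 1, message: 'Analysing Request' },
  { type: 'progress', label: 'summary', status: 'complete', order: 2, message: 'Analysis Complete' },
  { type: 'other', value: 'non-progress entries are ignored' },
];

const progressAnnotations = data.filter((x) => typeof x === 'object' && (x as any).type === 'progress');
```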
app/components/chat/Chat.client.tsx
CHANGED
@@ -137,36 +137,49 @@

     const [apiKeys, setApiKeys] = useState<Record<string, string>>({});

+    const {
+      messages,
+      isLoading,
+      input,
+      handleInputChange,
+      setInput,
+      stop,
+      append,
+      setMessages,
+      reload,
+      error,
+      data: chatData,
+      setData,
+    } = useChat({
+      api: '/api/chat',
+      body: {
+        apiKeys,
+        files,
+        promptId,
+        contextOptimization: contextOptimizationEnabled,
+      },
+      sendExtraMessageFields: true,
+      onError: (e) => {
+        logger.error('Request failed\n\n', e, error);
+        toast.error(
+          'There was an error processing your request: ' + (e.message ? e.message : 'No details were returned'),
+        );
+      },
+      onFinish: (message, response) => {
+        const usage = response.usage;
+        setData(undefined);
+
+        if (usage) {
+          console.log('Token usage:', usage);
+
+          // You can now use the usage data as needed
+        }

+        logger.debug('Finished streaming');
+      },
+      initialMessages,
+      initialInput: Cookies.get(PROMPT_COOKIE_KEY) || '',
+    });
     useEffect(() => {
       const prompt = searchParams.get('prompt');

@@ -535,6 +548,7 @@
           setImageDataList={setImageDataList}
           actionAlert={actionAlert}
           clearAlert={() => workbenchStore.clearAlert()}
+          data={chatData}
         />
       );
     },
app/components/chat/Markdown.tsx
CHANGED
@@ -23,8 +23,6 @@
   const components = useMemo(() => {
     return {
       div: ({ className, children, node, ...props }) => {
-        console.log(className, node);
-
         if (className?.includes('__boltArtifact__')) {
           const messageId = node?.properties.dataMessageId as string;
app/components/chat/ProgressCompilation.tsx
ADDED
@@ -0,0 +1,111 @@
+import { AnimatePresence, motion } from 'framer-motion';
+import React, { useState } from 'react';
+import type { ProgressAnnotation } from '~/types/context';
+import { classNames } from '~/utils/classNames';
+import { cubicEasingFn } from '~/utils/easings';
+
+export default function ProgressCompilation({ data }: { data?: ProgressAnnotation[] }) {
+  const [progressList, setProgressList] = React.useState<ProgressAnnotation[]>([]);
+  const [expanded, setExpanded] = useState(false);
+  React.useEffect(() => {
+    if (!data || data.length == 0) {
+      setProgressList([]);
+      return;
+    }
+
+    const progressMap = new Map<string, ProgressAnnotation>();
+    data.forEach((x) => {
+      const existingProgress = progressMap.get(x.label);
+
+      if (existingProgress && existingProgress.status === 'complete') {
+        return;
+      }
+
+      progressMap.set(x.label, x);
+    });
+
+    const newData = Array.from(progressMap.values());
+    newData.sort((a, b) => a.order - b.order);
+    setProgressList(newData);
+  }, [data]);
+
+  if (progressList.length === 0) {
+    return <></>;
+  }
+
+  return (
+    <AnimatePresence>
+      <div
+        className={classNames(
+          'bg-bolt-elements-background-depth-2',
+          'border border-bolt-elements-borderColor',
+          'shadow-lg rounded-lg relative w-full max-w-chat mx-auto z-prompt',
+          'p-1',
+        )}
+        style={{ transform: 'translateY(1rem)' }}
+      >
+        <div
+          className={classNames(
+            'bg-bolt-elements-item-backgroundAccent',
+            'p-1 rounded-lg text-bolt-elements-item-contentAccent',
+            'flex ',
+          )}
+        >
+          <div className="flex-1">
+            <AnimatePresence>
+              {expanded ? (
+                <motion.div
+                  className="actions"
+                  initial={{ height: 0 }}
+                  animate={{ height: 'auto' }}
+                  exit={{ height: '0px' }}
+                  transition={{ duration: 0.15 }}
+                >
+                  {progressList.map((x, i) => {
+                    return <ProgressItem key={i} progress={x} />;
+                  })}
+                </motion.div>
+              ) : (
+                <ProgressItem progress={progressList.slice(-1)[0]} />
+              )}
+            </AnimatePresence>
+          </div>
+          <motion.button
+            initial={{ width: 0 }}
+            animate={{ width: 'auto' }}
+            exit={{ width: 0 }}
+            transition={{ duration: 0.15, ease: cubicEasingFn }}
+            className=" p-1 rounded-lg bg-bolt-elements-item-backgroundAccent hover:bg-bolt-elements-artifacts-backgroundHover"
+            onClick={() => setExpanded((v) => !v)}
+          >
+            <div className={expanded ? 'i-ph:caret-up-bold' : 'i-ph:caret-down-bold'}></div>
+          </motion.button>
+        </div>
+      </div>
+    </AnimatePresence>
+  );
+}
+
+const ProgressItem = ({ progress }: { progress: ProgressAnnotation }) => {
+  return (
+    <motion.div
+      className={classNames('flex text-sm gap-3')}
+      initial={{ opacity: 0 }}
+      animate={{ opacity: 1 }}
+      exit={{ opacity: 0 }}
+      transition={{ duration: 0.15 }}
+    >
+      <div className="flex items-center gap-1.5 ">
+        <div>
+          {progress.status === 'in-progress' ? (
+            <div className="i-svg-spinners:90-ring-with-bg"></div>
+          ) : progress.status === 'complete' ? (
+            <div className="i-ph:check"></div>
+          ) : null}
+        </div>
+        {/* {x.label} */}
+      </div>
+      {progress.message}
+    </motion.div>
+  );
+};
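The component keeps at most one entry per `label` (an entry already marked `complete` is not overwritten by a later one) and sorts by `order`, so the collapsed view shows the most recent stage. A small illustrative usage with invented annotation values:

```tsx
import ProgressCompilation from '~/components/chat/ProgressCompilation';
import type { ProgressAnnotation } from '~/types/context';

// Illustrative values only: the two 'summary' entries collapse to the completed
// one, and the in-progress 'context' entry is what the collapsed view shows.
const annotations: ProgressAnnotation[] = [
  { type: 'progress', label: 'summary', status: 'in-progress', order: 1, message: 'Analysing Request' },
  { type: 'progress', label: 'summary', status: 'complete', order: 2, message: 'Analysis Complete' },
  { type: 'progress', label: 'context', status: 'in-progress', order: 3, message: 'Determining Files to Read' },
];

export const Example = () => <ProgressCompilation data={annotations} />;
```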
app/components/ui/Popover.tsx
CHANGED
@@ -1,15 +1,24 @@
 import * as Popover from '@radix-ui/react-popover';
 import type { PropsWithChildren, ReactNode } from 'react';

+export default ({
+  children,
+  trigger,
+  side,
+  align,
+}: PropsWithChildren<{
+  trigger: ReactNode;
+  side: 'top' | 'right' | 'bottom' | 'left' | undefined;
+  align: 'center' | 'start' | 'end' | undefined;
+}>) => (
   <Popover.Root>
     <Popover.Trigger asChild>{trigger}</Popover.Trigger>
     <Popover.Anchor />
     <Popover.Portal>
       <Popover.Content
         sideOffset={10}
+        side={side}
+        align={align}
         className="bg-bolt-elements-background-depth-2 text-bolt-elements-item-contentAccent p-2 rounded-md shadow-xl z-workbench"
       >
         {children}
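The wrapper now forwards Radix's `side` and `align` props straight to `Popover.Content`, which is what lets `AssistantMessage` open the summary popover to the right. A minimal usage sketch (the `Details` trigger is just an example):

```tsx
import Popover from '~/components/ui/Popover';

// side/align are passed through to @radix-ui/react-popover's Content element.
export const Example = () => (
  <Popover side="right" align="start" trigger={<button>Details</button>}>
    <p>Popover body</p>
  </Popover>
);
```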
app/lib/.server/llm/create-summary.ts
CHANGED
@@ -16,7 +16,7 @@
   contextOptimization?: boolean;
   onFinish?: (resp: GenerateTextResult<Record<string, CoreTool<any, any>>, never>) => void;
 }) {
+  const { messages, env: serverEnv, apiKeys, providerSettings, onFinish } = props;
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
   const processedMessages = messages.map((message) => {
@@ -29,9 +29,9 @@
     } else if (message.role == 'assistant') {
       let content = message.content;

+      content = simplifyBoltActions(content);
+      content = content.replace(/<div class=\\"__boltThought__\\">.*?<\/div>/s, '');
+      content = content.replace(/<think>.*?<\/think>/s, '');

       return { ...message, content };
     }
@@ -92,6 +92,8 @@
     }
   }

+  logger.debug('Sliced Messages:', slicedMessages.length);
+
   const extractTextContent = (message: Message) =>
     Array.isArray(message.content)
       ? (message.content.find((item) => item.type === 'text')?.text as string) || ''
@@ -100,25 +102,82 @@
   // select files from the list of code file from the project that might be useful for the current request from the user
   const resp = await generateText({
     system: `
-        You are a software engineer. You are working on a project.
+        You are a software engineer. You are working on a project. you need to summarize the work till now and provide a summary of the chat till now.
+
+        Please only use the following format to generate the summary:
+---
+# Project Overview
+- **Project**: {project_name} - {brief_description}
+- **Current Phase**: {phase}
+- **Tech Stack**: {languages}, {frameworks}, {key_dependencies}
+- **Environment**: {critical_env_details}
+
+# Conversation Context
+- **Last Topic**: {main_discussion_point}
+- **Key Decisions**: {important_decisions_made}
+- **User Context**:
+  - Technical Level: {expertise_level}
+  - Preferences: {coding_style_preferences}
+  - Communication: {preferred_explanation_style}
+
+# Implementation Status
+## Current State
+- **Active Feature**: {feature_in_development}
+- **Progress**: {what_works_and_what_doesn't}
+- **Blockers**: {current_challenges}
+
+## Code Evolution
+- **Recent Changes**: {latest_modifications}
+- **Working Patterns**: {successful_approaches}
+- **Failed Approaches**: {attempted_solutions_that_failed}
+
+# Requirements
+- **Implemented**: {completed_features}
+- **In Progress**: {current_focus}
+- **Pending**: {upcoming_features}
+- **Technical Constraints**: {critical_constraints}
+
+# Critical Memory
+- **Must Preserve**: {crucial_technical_context}
+- **User Requirements**: {specific_user_needs}
+- **Known Issues**: {documented_problems}
+
+# Next Actions
+- **Immediate**: {next_steps}
+- **Open Questions**: {unresolved_issues}
+
+---
+Note:
+4. Keep entries concise and focused on information needed for continuity
+

+---

         RULES:
-        * Only provide the summary of the chat till now.
+        * Only provide the whole summary of the chat till now.
         * Do not provide any new information.
+        * DO not need to think too much just start writing imidiately
+        * do not write any thing other that the summary with with the provided structure
         `,
     prompt: `
-please provide a summary of the chat till now.
-below is the latest chat:

+Here is the previous summary of the chat:
+<old_summary>
+${summaryText}
+</old_summary>
+
+Below is the chat after that:
 ---
+<new_chats>
 ${slicedMessages
   .map((x) => {
     return `---\n[${x.role}] ${extractTextContent(x)}\n---`;
   })
   .join('\n')}
+</new_chats>
 ---
+
+Please provide a summary of the chat till now including the hitorical summary of the chat.
         `,
     model: provider.getModelInstance({
       model: currentModel,
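The two `replace` calls strip hidden reasoning out of assistant messages before they are summarised; the `/s` flag lets `.` match across newlines, and `\\"` targets the escaped quotes as they appear in stored message content. A rough illustration of the intent, using a made-up assistant message:

```ts
// Illustrative only: strip <think> blocks and escaped __boltThought__ wrappers
// from an assistant message before it is summarised.
let content = 'Sure.<think>\nhidden reasoning\n</think> Here is the fix.';
content = content.replace(/<div class=\\"__boltThought__\\">.*?<\/div>/s, '');
content = content.replace(/<think>.*?<\/think>/s, '');
// content === 'Sure. Here is the fix.'
```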
app/lib/.server/llm/select-context.ts
CHANGED
@@ -23,7 +23,7 @@
   summary: string;
   onFinish?: (resp: GenerateTextResult<Record<string, CoreTool<any, any>>, never>) => void;
 }) {
+  const { messages, env: serverEnv, apiKeys, files, providerSettings, summary, onFinish } = props;
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
   const processedMessages = messages.map((message) => {
@@ -36,9 +36,10 @@
     } else if (message.role == 'assistant') {
       let content = message.content;

+      content = simplifyBoltActions(content);
+
+      content = content.replace(/<div class=\\"__boltThought__\\">.*?<\/div>/s, '');
+      content = content.replace(/<think>.*?<\/think>/s, '');

       return { ...message, content };
     }
app/lib/.server/llm/stream-text.ts
CHANGED
@@ -7,7 +7,7 @@
 import { allowedHTMLElements } from '~/utils/markdown';
 import { LLMManager } from '~/lib/modules/llm/manager';
 import { createScopedLogger } from '~/utils/logger';
+import { createFilesContext, extractPropertiesFromMessage } from './utils';
 import { getFilePaths } from './select-context';

 export type Messages = Message[];
@@ -27,6 +27,7 @@
   contextOptimization?: boolean;
   contextFiles?: FileMap;
   summary?: string;
+  messageSliceId?: number;
 }) {
   const {
     messages,
@@ -51,10 +52,8 @@
       return { ...message, content };
     } else if (message.role == 'assistant') {
       let content = message.content;
-      content = simplifyBoltActions(content);
+      content = content.replace(/<div class=\\"__boltThought__\\">.*?<\/div>/s, '');
+      content = content.replace(/<think>.*?<\/think>/s, '');

       return { ...message, content };
     }
@@ -110,7 +109,7 @@
 ${filePaths.join('\n')}
 ---

-Below is the context loaded into context buffer for you to have knowledge of and might need changes to fullfill current user request.
+Below is the artifact containing the context loaded into context buffer for you to have knowledge of and might need changes to fullfill current user request.
 CONTEXT BUFFER:
 ---
 ${codeContext}
@@ -126,10 +125,14 @@
 ---
 `;

+      if (props.messageSliceId) {
+        processedMessages = processedMessages.slice(props.messageSliceId);
+      } else {
+        const lastMessage = processedMessages.pop();

+        if (lastMessage) {
+          processedMessages = [lastMessage];
+        }
       }
     }
   }
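The new `messageSliceId` lets the route trim the prompt when context optimization is on: turns before the slice point are assumed to be covered by the summary and the context buffer. A rough sketch of the effect, assuming the caller computes the index the way api.chat.ts does:

```ts
// Illustrative: with 7 processed messages and messageSliceId = 4 (length - 3),
// only the last three turns are sent verbatim; earlier turns are represented
// by the chat summary and the context buffer instead.
const processedMessages = ['m1', 'm2', 'm3', 'm4', 'm5', 'm6', 'm7'];
const messageSliceId = processedMessages.length - 3; // 4
const sliced = messageSliceId ? processedMessages.slice(messageSliceId) : processedMessages.slice(-1);
// sliced === ['m5', 'm6', 'm7']
```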
app/lib/.server/llm/utils.ts
CHANGED
@@ -82,10 +82,10 @@
       filePath = path.replace('/home/project/', '');
     }

+    return `<boltAction type="file" filePath="${filePath}">${codeWithLinesNumbers}</boltAction>`;
   });

+  return `<boltArtifact id="code-content" title="Code Content" >\n${fileContexts.join('\n')}\n</boltArtifact>`;
 }

 export function extractCurrentContext(messages: Message[]) {
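With this change the context buffer is emitted as a bolt artifact instead of bare `<file>` tags, matching the format the assistant itself produces. Roughly, the returned string looks like the following (file contents abbreviated; the attributes come from the `return` statements above):

```ts
// Hypothetical output shape for a two-file FileMap (contents abbreviated):
const contextBuffer = `<boltArtifact id="code-content" title="Code Content" >
<boltAction type="file" filePath="src/App.tsx">...file content with line numbers...</boltAction>
<boltAction type="file" filePath="src/main.tsx">...file content with line numbers...</boltAction>
</boltArtifact>`;
```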
app/routes/api.chat.ts
CHANGED
@@ -10,6 +10,7 @@
 import type { ContextAnnotation, ProgressAnnotation } from '~/types/context';
 import { WORK_DIR } from '~/utils/constants';
 import { createSummary } from '~/lib/.server/llm/create-summary';
+import { extractPropertiesFromMessage } from '~/lib/.server/llm/utils';

 export async function action(args: ActionFunctionArgs) {
   return chatAction(args);
@@ -70,15 +71,21 @@
       const filePaths = getFilePaths(files || {});
       let filteredFiles: FileMap | undefined = undefined;
       let summary: string | undefined = undefined;
+      let messageSliceId = 0;
+
+      if (messages.length > 3) {
+        messageSliceId = messages.length - 3;
+      }

       if (filePaths.length > 0 && contextOptimization) {
-        dataStream.writeData('HI ');
         logger.debug('Generating Chat Summary');
+        dataStream.writeData({
           type: 'progress',
+          label: 'summary',
+          status: 'in-progress',
+          order: progressCounter++,
+          message: 'Analysing Request',
+        } satisfies ProgressAnnotation);

         // Create a summary of the chat
         console.log(`Messages count: ${messages.length}`);
@@ -99,6 +106,13 @@
             }
           },
         });
+        dataStream.writeData({
+          type: 'progress',
+          label: 'summary',
+          status: 'complete',
+          order: progressCounter++,
+          message: 'Analysis Complete',
+        } satisfies ProgressAnnotation);

         dataStream.writeMessageAnnotation({
           type: 'chatSummary',
@@ -108,11 +122,13 @@

         // Update context buffer
         logger.debug('Updating Context Buffer');
+        dataStream.writeData({
           type: 'progress',
+          label: 'context',
+          status: 'in-progress',
+          order: progressCounter++,
+          message: 'Determining Files to Read',
+        } satisfies ProgressAnnotation);

         // Select context files
         console.log(`Messages count: ${messages.length}`);
@@ -152,12 +168,15 @@
           }),
         } as ContextAnnotation);

+        dataStream.writeData({
           type: 'progress',
+          label: 'context',
+          status: 'complete',
+          order: progressCounter++,
+          message: 'Code Files Selected',
+        } satisfies ProgressAnnotation);
+
+        // logger.debug('Code Files Selected');
       }

       // Stream the text
@@ -181,6 +200,13 @@
               totalTokens: cumulativeUsage.totalTokens,
             },
           });
+          dataStream.writeData({
+            type: 'progress',
+            label: 'response',
+            status: 'complete',
+            order: progressCounter++,
+            message: 'Response Generated',
+          } satisfies ProgressAnnotation);
           await new Promise((resolve) => setTimeout(resolve, 0));

           // stream.close();
@@ -195,8 +221,14 @@

           logger.info(`Reached max token limit (${MAX_TOKENS}): Continuing message (${switchesLeft} switches left)`);

+          const lastUserMessage = messages.filter((x) => x.role == 'user').slice(-1)[0];
+          const { model, provider } = extractPropertiesFromMessage(lastUserMessage);
           messages.push({ id: generateId(), role: 'assistant', content });
+          messages.push({
+            id: generateId(),
+            role: 'user',
+            content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${CONTINUE_PROMPT}`,
+          });

           const result = await streamText({
             messages,
@@ -207,6 +239,9 @@
             providerSettings,
             promptId,
             contextOptimization,
+            contextFiles: filteredFiles,
+            summary,
+            messageSliceId,
           });

           result.mergeIntoDataStream(dataStream);
@@ -226,6 +261,14 @@
         },
       };

+      dataStream.writeData({
+        type: 'progress',
+        label: 'response',
+        status: 'in-progress',
+        order: progressCounter++,
+        message: 'Generating Response',
+      } satisfies ProgressAnnotation);
+
       const result = await streamText({
         messages,
         env: context.cloudflare?.env,
@@ -237,6 +280,7 @@
         contextOptimization,
         contextFiles: filteredFiles,
         summary,
+        messageSliceId,
       });

       (async () => {
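Taken together, a context-optimized request now writes a predictable sequence of progress events to the data stream, which is what `ProgressCompilation` renders on the client. An approximate timeline (order values are illustrative; the actual counter is shared across all events in the request):

```ts
import type { ProgressAnnotation } from '~/types/context';

// Approximate order of dataStream.writeData(...) progress events for one
// context-optimized request (values illustrative).
const progressTimeline: ProgressAnnotation[] = [
  { type: 'progress', label: 'summary', status: 'in-progress', order: 1, message: 'Analysing Request' },
  { type: 'progress', label: 'summary', status: 'complete', order: 2, message: 'Analysis Complete' },
  { type: 'progress', label: 'context', status: 'in-progress', order: 3, message: 'Determining Files to Read' },
  { type: 'progress', label: 'context', status: 'complete', order: 4, message: 'Code Files Selected' },
  { type: 'progress', label: 'response', status: 'in-progress', order: 5, message: 'Generating Response' },
  { type: 'progress', label: 'response', status: 'complete', order: 6, message: 'Response Generated' },
];
```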
app/routes/api.llmcall.ts
CHANGED
@@ -7,6 +7,7 @@
 import { LLMManager } from '~/lib/modules/llm/manager';
 import type { ModelInfo } from '~/lib/modules/llm/types';
 import { getApiKeysFromCookie, getProviderSettingsFromCookie } from '~/lib/api/cookies';
+import { createScopedLogger } from '~/utils/logger';

 export async function action(args: ActionFunctionArgs) {
   return llmCallAction(args);
@@ -21,6 +22,8 @@
   return llmManager.updateModelList(options);
 }

+const logger = createScopedLogger('api.llmcall');
+
 async function llmCallAction({ context, request }: ActionFunctionArgs) {
   const { system, message, model, provider, streamOutput } = await request.json<{
     system: string;
@@ -106,6 +109,8 @@
       throw new Error('Provider not found');
     }

+    logger.info(`Generating response Provider: ${provider.name}, Model: ${modelDetails.name}`);
+
     const result = await generateText({
       system,
       messages: [
@@ -123,6 +128,7 @@
       maxTokens: dynamicMaxTokens,
       toolChoice: 'none',
     });
+    logger.info(`Generated response`);

     return new Response(JSON.stringify(result), {
       status: 200,
app/types/context.ts
CHANGED
@@ -11,6 +11,8 @@

 export type ProgressAnnotation = {
   type: 'progress';
+  label: string;
+  status: 'in-progress' | 'complete';
+  order: number;
   message: string;
 };
app/utils/selectStarterTemplate.ts
CHANGED
@@ -59,6 +59,7 @@
 5. If no perfect match exists, recommend the closest option

 Important: Provide only the selection tags in your response, no additional text.
+MOST IMPORTANT: YOU DONT HAVE TIME TO THINK JUST START RESPONDING BASED ON HUNCH
 `;

 const templates: Template[] = STARTER_TEMPLATES.filter((t) => !t.name.includes('shadcn'));