codacus committed
Commit ea5c624 · 1 Parent(s): 2af32b0

feat(context optimization): improved context management and reduced chat overhead

app/components/chat/Chat.client.tsx CHANGED
@@ -91,7 +91,7 @@ export const ChatImpl = memo(
     const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
     const [uploadedFiles, setUploadedFiles] = useState<File[]>([]); // Move here
     const [imageDataList, setImageDataList] = useState<string[]>([]); // Move here
-
+    const files = useStore(workbenchStore.files);
     const [model, setModel] = useState(() => {
       const savedModel = Cookies.get('selectedModel');
       return savedModel || DEFAULT_MODEL;
@@ -111,6 +111,7 @@ export const ChatImpl = memo(
       api: '/api/chat',
       body: {
         apiKeys,
+        files,
       },
       onError: (error) => {
         logger.error('Request failed\n\n', error);
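
The `files` value read here comes from `workbenchStore.files` and is now included in every /api/chat request body. A minimal sketch of the payload shape, matching the FileMap type introduced in stream-text.ts below (paths and contents are hypothetical examples):

// Illustrative only: the shape of the `files` field added to the request body.
// Keys are absolute WebContainer paths; these entries are made up.
const exampleFiles: Record<string, { type: 'file'; content: string; isBinary: boolean } | { type: 'folder' }> = {
  '/home/project/package.json': { type: 'file', content: '{ "name": "demo" }', isBinary: false },
  '/home/project/src': { type: 'folder' },
  '/home/project/src/index.ts': { type: 'file', content: 'console.log("hi");', isBinary: false },
};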

app/lib/.server/llm/stream-text.ts CHANGED
@@ -3,6 +3,7 @@ import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
 import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
+import ignore from 'ignore';
 
 interface ToolResult<Name extends string, Args, Result> {
   toolCallId: string;
@@ -22,6 +23,79 @@ export type Messages = Message[];
 
 export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
 
+export interface File {
+  type: 'file';
+  content: string;
+  isBinary: boolean;
+}
+
+export interface Folder {
+  type: 'folder';
+}
+
+type Dirent = File | Folder;
+
+export type FileMap = Record<string, Dirent | undefined>;
+
+function simplifyBoltActions(input: string): string {
+  // Using regex to match boltAction tags that have type="file"
+  const regex = /(<boltAction[^>]*type="file"[^>]*>)([\s\S]*?)(<\/boltAction>)/g;
+
+  // Replace each matching occurrence
+  return input.replace(regex, (_0, openingTag, _2, closingTag) => {
+    return `${openingTag}\n ...\n ${closingTag}`;
+  });
+}
+
+// Common patterns to ignore, similar to .gitignore
+const IGNORE_PATTERNS = [
+  'node_modules/**',
+  '.git/**',
+  'dist/**',
+  'build/**',
+  '.next/**',
+  'coverage/**',
+  '.cache/**',
+  '.vscode/**',
+  '.idea/**',
+  '**/*.log',
+  '**/.DS_Store',
+  '**/npm-debug.log*',
+  '**/yarn-debug.log*',
+  '**/yarn-error.log*',
+  '**/*lock.json',
+  '**/*lock.yml',
+];
+const ig = ignore().add(IGNORE_PATTERNS);
+
+function createFilesContext(files: FileMap) {
+  let filePaths = Object.keys(files);
+  filePaths = filePaths.filter((x) => {
+    const relPath = x.replace('/home/project/', '');
+    return !ig.ignores(relPath);
+  });
+  console.log(filePaths);
+
+  const fileContexts = filePaths
+    .filter((x) => files[x] && files[x].type == 'file')
+    .map((path) => {
+      const dirent = files[path];
+
+      if (!dirent || dirent.type == 'folder') {
+        return '';
+      }
+
+      const codeWithLinesNumbers = dirent.content
+        .split('\n')
+        .map((v, i) => `${i + 1}|${v}`)
+        .join('\n');
+
+      return `<file path="${path}">\n${codeWithLinesNumbers}\n</file>`;
+    });
+
+  return `Below are the code files present in the webcontainer:\ncode format:\n<line number>|<line content>\n <codebase>${fileContexts.join('\n\n')}\n\n</codebase>`;
+}
+
 function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
   const textContent = Array.isArray(message.content)
     ? message.content.find((item) => item.type === 'text')?.text || ''
@@ -63,6 +137,7 @@ export async function streamText(
   env: Env,
   options?: StreamingOptions,
   apiKeys?: Record<string, string>,
+  files?: FileMap,
 ) {
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
@@ -77,6 +152,11 @@ export async function streamText(
 
       currentProvider = provider;
 
+      return { ...message, content };
+    } else if (message.role == 'assistant') {
+      let content = message.content;
+      content = simplifyBoltActions(content);
+
       return { ...message, content };
     }
 
@@ -87,9 +167,19 @@ export async function streamText(
 
   const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
 
+  let systemPrompt = getSystemPrompt();
+  let codeContext = '';
+
+  if (files) {
+    codeContext = createFilesContext(files);
+    systemPrompt = `${systemPrompt}\n\n ${codeContext}`;
+  }
+
+  console.log({ codeContext, processedMessages });
+
   return _streamText({
     model: getModel(currentProvider, currentModel, env, apiKeys) as any,
-    system: getSystemPrompt(),
+    system: systemPrompt,
     maxTokens: dynamicMaxTokens,
     messages: convertToCoreMessages(processedMessages as any),
     ...options,
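
To make the new helpers concrete, here is a small sketch (not part of the commit) of what `createFilesContext` and `simplifyBoltActions` produce for made-up inputs; expected output is shown in comments, with whitespace approximate:

// Assumes the FileMap type and helpers defined above; sample paths and contents are invented.
const sampleFiles: FileMap = {
  '/home/project/src/index.ts': { type: 'file', content: 'const a = 1;\nconsole.log(a);', isBinary: false },
  '/home/project/node_modules/pkg/index.js': { type: 'file', content: '/* skipped */', isBinary: false },
};

// node_modules is filtered out by IGNORE_PATTERNS; the remaining file is numbered line by line:
console.log(createFilesContext(sampleFiles));
// Below are the code files present in the webcontainer:
// code format:
// <line number>|<line content>
//  <codebase><file path="/home/project/src/index.ts">
// 1|const a = 1;
// 2|console.log(a);
// </file>
//
// </codebase>

// Earlier assistant messages keep their boltAction tags but lose the file bodies,
// since the current file contents are already supplied via the codebase context above:
console.log(simplifyBoltActions('<boltAction type="file" filePath="src/index.ts">const a = 1;</boltAction>'));
// <boltAction type="file" filePath="src/index.ts">
//  ...
//  </boltAction>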

app/lib/hooks/useMessageParser.ts CHANGED
@@ -23,14 +23,14 @@ const messageParser = new StreamingMessageParser({
       logger.trace('onActionOpen', data.action);
 
       // we only add shell actions when when the close tag got parsed because only then we have the content
-      if (data.action.type !== 'shell') {
+      if (data.action.type === 'file') {
         workbenchStore.addAction(data);
       }
     },
     onActionClose: (data) => {
       logger.trace('onActionClose', data.action);
 
-      if (data.action.type === 'shell') {
+      if (data.action.type !== 'file') {
        workbenchStore.addAction(data);
       }
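
Net effect of the swapped conditions: file actions are queued as soon as their opening tag is parsed, presumably so file writes can begin while their content is still streaming in, while every other action type (e.g. shell) still waits for its closing tag, when its full content is available. A hypothetical helper expressing the same dispatch rule:

// Illustrative only; not code from this commit.
type ParserEvent = 'open' | 'close';

function shouldEnqueueAction(actionType: string, event: ParserEvent): boolean {
  // file actions enqueue on the opening tag, everything else on the closing tag
  return actionType === 'file' ? event === 'open' : event === 'close';
}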
 
app/lib/stores/workbench.ts CHANGED
@@ -261,9 +261,9 @@ export class WorkbenchStore {
     this.artifacts.setKey(messageId, { ...artifact, ...state });
   }
   addAction(data: ActionCallbackData) {
-    this._addAction(data);
+    // this._addAction(data);
 
-    // this.addToExecutionQueue(()=>this._addAction(data))
+    this.addToExecutionQueue(() => this._addAction(data));
   }
   async _addAction(data: ActionCallbackData) {
     const { messageId } = data;
@@ -293,6 +293,12 @@ export class WorkbenchStore {
       unreachable('Artifact not found');
     }
 
+    const action = artifact.runner.actions.get()[data.actionId];
+
+    if (action.executed) {
+      return;
+    }
+
     if (data.action.type === 'file') {
       const wc = await webcontainer;
       const fullPath = nodePath.join(wc.workdir, data.action.filePath);
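
`addAction` now routes every action through `addToExecutionQueue`, and `_addAction` bails out early when the action has already run. The queue implementation is not part of this diff; a minimal sketch of the kind of promise-chaining serializer it implies (an assumption, for illustration only):

// Hypothetical sketch of a serial execution queue; the real addToExecutionQueue
// in WorkbenchStore may differ.
class SerialQueue {
  private tail: Promise<void> = Promise.resolve();

  add(task: () => Promise<void>): void {
    // Chain each task after the previous one so queued actions run strictly in order,
    // even though callers enqueue them as soon as the parser emits them.
    this.tail = this.tail.then(task).catch((err) => console.error('queued action failed', err));
  }
}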

app/routes/api.chat.ts CHANGED
@@ -29,9 +29,9 @@ function parseCookies(cookieHeader: string) {
 }
 
 async function chatAction({ context, request }: ActionFunctionArgs) {
-  const { messages } = await request.json<{
+  const { messages, files } = await request.json<{
     messages: Messages;
-    model: string;
+    files: any;
   }>();
 
   const cookieHeader = request.headers.get('Cookie');
@@ -60,13 +60,13 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
         messages.push({ role: 'assistant', content });
         messages.push({ role: 'user', content: CONTINUE_PROMPT });
 
-        const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
+        const result = await streamText(messages, context.cloudflare.env, options, apiKeys, files);
 
         return stream.switchSource(result.toAIStream());
       },
     };
 
-    const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
+    const result = await streamText(messages, context.cloudflare.env, options, apiKeys, files);
 
     stream.switchSource(result.toAIStream());
 