codacus committed on
Commit fe2f008 · unverified · 2 parent(s): b0743e0 05a5f85

Merge branch 'main' into github-import

README.md CHANGED
@@ -40,18 +40,18 @@ https://thinktank.ottomator.ai
  - ✅ Together Integration (@mouimet-infinisoft)
  - ✅ Mobile friendly (@qwikode)
  - ✅ Better prompt enhancing (@SujalXplores)
- - **HIGH PRIORITY** - ALMOST DONE - Attach images to prompts (@atrokhym)
+ - Attach images to prompts (@atrokhym)
  - ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
  - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
  - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
- - ⬜ Azure Open AI API Integration
- - ⬜ Perplexity Integration
- - ⬜ Vertex AI Integration
  - ⬜ Deploy directly to Vercel/Netlify/other similar platforms
  - ⬜ Have LLM plan the project in a MD file for better results/transparency
  - ⬜ VSCode Integration with git-like confirmations
  - ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
  - ⬜ Voice prompting
+ - ⬜ Azure Open AI API Integration
+ - ⬜ Perplexity Integration
+ - ⬜ Vertex AI Integration

  ## Bolt.new: AI-Powered Full-Stack Web Development in the Browser
app/components/chat/Artifact.tsx CHANGED
@@ -28,6 +28,7 @@ interface ArtifactProps {
 export const Artifact = memo(({ messageId }: ArtifactProps) => {
   const userToggledActions = useRef(false);
   const [showActions, setShowActions] = useState(false);
+  const [allActionFinished, setAllActionFinished] = useState(false);

   const artifacts = useStore(workbenchStore.artifacts);
   const artifact = artifacts[messageId];
@@ -47,6 +48,11 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
     if (actions.length && !showActions && !userToggledActions.current) {
       setShowActions(true);
     }
+
+    if (actions.length !== 0) {
+      const finished = !actions.find((action) => action.status !== 'complete');
+      setAllActionFinished(finished);
+    }
   }, [actions]);

   return (
@@ -59,6 +65,18 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
           workbenchStore.showWorkbench.set(!showWorkbench);
         }}
       >
+        {artifact.type == 'bundled' && (
+          <>
+            <div className="p-4">
+              {allActionFinished ? (
+                <div className={'i-ph:files-light'} style={{ fontSize: '2rem' }}></div>
+              ) : (
+                <div className={'i-svg-spinners:90-ring-with-bg'} style={{ fontSize: '2rem' }}></div>
+              )}
+            </div>
+            <div className="bg-bolt-elements-artifacts-borderColor w-[1px]" />
+          </>
+        )}
         <div className="px-5 p-3.5 w-full text-left">
           <div className="w-full text-bolt-elements-textPrimary font-medium leading-5 text-sm">{artifact?.title}</div>
           <div className="w-full w-full text-bolt-elements-textSecondary text-xs mt-0.5">Click to open Workbench</div>
@@ -66,7 +84,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
       </button>
       <div className="bg-bolt-elements-artifacts-borderColor w-[1px]" />
       <AnimatePresence>
-        {actions.length && (
+        {actions.length && artifact.type !== 'bundled' && (
           <motion.button
             initial={{ width: 0 }}
             animate={{ width: 'auto' }}
@@ -83,7 +101,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
       </AnimatePresence>
     </div>
     <AnimatePresence>
-      {showActions && actions.length > 0 && (
+      {artifact.type !== 'bundled' && showActions && actions.length > 0 && (
        <motion.div
          className="actions"
          initial={{ height: 0 }}
@@ -92,6 +110,7 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => {
         transition={{ duration: 0.15 }}
       >
         <div className="bg-bolt-elements-artifacts-borderColor h-[1px]" />
+
         <div className="p-5 text-left bg-bolt-elements-actions-background">
           <ActionList actions={actions} />
         </div>
app/components/chat/BaseChat.tsx CHANGED
@@ -23,44 +23,9 @@ import { ImportButtons } from '~/components/chat/chatExportAndImport/ImportButto
23
  import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
24
  import GitCloneButton from './GitCloneButton';
25
 
26
- // @ts-ignore TODO: Introduce proper types
27
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
28
- const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
29
- return (
30
- <div className="mb-2 flex gap-2 flex-col sm:flex-row">
31
- <select
32
- value={provider?.name}
33
- onChange={(e) => {
34
- setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
35
-
36
- const firstModel = [...modelList].find((m) => m.provider == e.target.value);
37
- setModel(firstModel ? firstModel.name : '');
38
- }}
39
- className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
40
- >
41
- {providerList.map((provider: ProviderInfo) => (
42
- <option key={provider.name} value={provider.name}>
43
- {provider.name}
44
- </option>
45
- ))}
46
- </select>
47
- <select
48
- key={provider?.name}
49
- value={model}
50
- onChange={(e) => setModel(e.target.value)}
51
- className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
52
- >
53
- {[...modelList]
54
- .filter((e) => e.provider == provider?.name && e.name)
55
- .map((modelOption) => (
56
- <option key={modelOption.name} value={modelOption.name}>
57
- {modelOption.label}
58
- </option>
59
- ))}
60
- </select>
61
- </div>
62
- );
63
- };
64
 
65
  const TEXTAREA_MIN_HEIGHT = 76;
66
 
@@ -86,6 +51,10 @@ interface BaseChatProps {
86
  enhancePrompt?: () => void;
87
  importChat?: (description: string, messages: Message[]) => Promise<void>;
88
  exportChat?: () => void;
89
  }
90
 
91
  export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
@@ -97,20 +66,24 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
97
  showChat = true,
98
  chatStarted = false,
99
  isStreaming = false,
100
- enhancingPrompt = false,
101
- promptEnhanced = false,
102
- messages,
103
- input = '',
104
  model,
105
  setModel,
106
  provider,
107
  setProvider,
108
- sendMessage,
 
109
  handleInputChange,
 
110
  enhancePrompt,
 
111
  handleStop,
112
  importChat,
113
  exportChat,
114
  },
115
  ref,
116
  ) => {
@@ -118,7 +91,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
118
  const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
119
  const [modelList, setModelList] = useState(MODEL_LIST);
120
  const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
121
 
 
122
  useEffect(() => {
123
  // Load API keys from cookies on component mount
124
  try {
@@ -141,8 +118,72 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
141
  initializeModelList().then((modelList) => {
142
  setModelList(modelList);
143
  });
  }, []);
145
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
146
  const updateApiKey = (provider: string, key: string) => {
147
  try {
148
  const updatedApiKeys = { ...apiKeys, [provider]: key };
@@ -160,6 +201,58 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
160
  }
161
  };
163
  const baseChat = (
164
  <div
165
  ref={ref}
@@ -276,7 +369,14 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
276
  )}
277
  </div>
278
  </div>
279
-
280
  <div
281
  className={classNames(
282
  'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
@@ -284,9 +384,41 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
284
  >
285
  <textarea
286
  ref={textareaRef}
287
- className={
288
- 'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm'
289
- }
290
  onKeyDown={(event) => {
291
  if (event.key === 'Enter') {
292
  if (event.shiftKey) {
@@ -295,13 +427,19 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
295
 
296
  event.preventDefault();
297
 
298
- sendMessage?.(event);
299
  }
300
  }}
301
  value={input}
302
  onChange={(event) => {
303
  handleInputChange?.(event);
304
  }}
 
305
  style={{
306
  minHeight: TEXTAREA_MIN_HEIGHT,
307
  maxHeight: TEXTAREA_MAX_HEIGHT,
@@ -312,7 +450,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
312
  <ClientOnly>
313
  {() => (
314
  <SendButton
315
- show={input.length > 0 || isStreaming}
316
  isStreaming={isStreaming}
317
  onClick={(event) => {
318
  if (isStreaming) {
@@ -320,21 +458,28 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
320
  return;
321
  }
322
 
323
- sendMessage?.(event);
 
 
324
  }}
325
  />
326
  )}
327
  </ClientOnly>
328
  <div className="flex justify-between items-center text-sm p-4 pt-2">
329
  <div className="flex gap-1 items-center">
 
 
 
330
  <IconButton
331
  title="Enhance prompt"
332
  disabled={input.length === 0 || enhancingPrompt}
333
- className={classNames('transition-all', {
334
- 'opacity-100!': enhancingPrompt,
335
- 'text-bolt-elements-item-contentAccent! pr-1.5 enabled:hover:bg-bolt-elements-item-backgroundAccent!':
336
- promptEnhanced,
337
- })}
 
 
338
  onClick={() => enhancePrompt?.()}
339
  >
340
  {enhancingPrompt ? (
@@ -349,6 +494,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
349
  </>
350
  )}
351
  </IconButton>
352
  {chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
353
  </div>
354
  {input.length > 3 ? (
@@ -368,7 +520,15 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
368
  <GitCloneButton importChat={importChat} />
369
  </div>
370
  )}
371
- {!chatStarted && ExamplePrompts(sendMessage)}
372
  </div>
373
  <ClientOnly>{() => <Workbench chatStarted={chatStarted} isStreaming={isStreaming} />}</ClientOnly>
374
  </div>
 
23
  import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
24
  import GitCloneButton from './GitCloneButton';
25
 
26
+ import FilePreview from './FilePreview';
27
+ import { ModelSelector } from '~/components/chat/ModelSelector';
28
+ import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
29
 
30
  const TEXTAREA_MIN_HEIGHT = 76;
31
 
 
51
  enhancePrompt?: () => void;
52
  importChat?: (description: string, messages: Message[]) => Promise<void>;
53
  exportChat?: () => void;
54
+ uploadedFiles?: File[];
55
+ setUploadedFiles?: (files: File[]) => void;
56
+ imageDataList?: string[];
57
+ setImageDataList?: (dataList: string[]) => void;
58
  }
59
 
60
  export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
 
66
  showChat = true,
67
  chatStarted = false,
68
  isStreaming = false,
69
  model,
70
  setModel,
71
  provider,
72
  setProvider,
73
+ input = '',
74
+ enhancingPrompt,
75
  handleInputChange,
76
+ promptEnhanced,
77
  enhancePrompt,
78
+ sendMessage,
79
  handleStop,
80
  importChat,
81
  exportChat,
82
+ uploadedFiles = [],
83
+ setUploadedFiles,
84
+ imageDataList = [],
85
+ setImageDataList,
86
+ messages,
87
  },
88
  ref,
89
  ) => {
 
91
  const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
92
  const [modelList, setModelList] = useState(MODEL_LIST);
93
  const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
94
+ const [isListening, setIsListening] = useState(false);
95
+ const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
96
+ const [transcript, setTranscript] = useState('');
97
 
98
+ console.log(transcript);
99
  useEffect(() => {
100
  // Load API keys from cookies on component mount
101
  try {
 
118
  initializeModelList().then((modelList) => {
119
  setModelList(modelList);
120
  });
121
+
122
+ if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
123
+ const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
124
+ const recognition = new SpeechRecognition();
125
+ recognition.continuous = true;
126
+ recognition.interimResults = true;
127
+
128
+ recognition.onresult = (event) => {
129
+ const transcript = Array.from(event.results)
130
+ .map((result) => result[0])
131
+ .map((result) => result.transcript)
132
+ .join('');
133
+
134
+ setTranscript(transcript);
135
+
136
+ if (handleInputChange) {
137
+ const syntheticEvent = {
138
+ target: { value: transcript },
139
+ } as React.ChangeEvent<HTMLTextAreaElement>;
140
+ handleInputChange(syntheticEvent);
141
+ }
142
+ };
143
+
144
+ recognition.onerror = (event) => {
145
+ console.error('Speech recognition error:', event.error);
146
+ setIsListening(false);
147
+ };
148
+
149
+ setRecognition(recognition);
150
+ }
151
  }, []);
152
 
153
+ const startListening = () => {
154
+ if (recognition) {
155
+ recognition.start();
156
+ setIsListening(true);
157
+ }
158
+ };
159
+
160
+ const stopListening = () => {
161
+ if (recognition) {
162
+ recognition.stop();
163
+ setIsListening(false);
164
+ }
165
+ };
166
+
167
+ const handleSendMessage = (event: React.UIEvent, messageInput?: string) => {
168
+ if (sendMessage) {
169
+ sendMessage(event, messageInput);
170
+
171
+ if (recognition) {
172
+ recognition.abort(); // Stop current recognition
173
+ setTranscript(''); // Clear transcript
174
+ setIsListening(false);
175
+
176
+ // Clear the input by triggering handleInputChange with empty value
177
+ if (handleInputChange) {
178
+ const syntheticEvent = {
179
+ target: { value: '' },
180
+ } as React.ChangeEvent<HTMLTextAreaElement>;
181
+ handleInputChange(syntheticEvent);
182
+ }
183
+ }
184
+ }
185
+ };
186
+
187
  const updateApiKey = (provider: string, key: string) => {
188
  try {
189
  const updatedApiKeys = { ...apiKeys, [provider]: key };
 
201
  }
202
  };
203
 
204
+ const handleFileUpload = () => {
205
+ const input = document.createElement('input');
206
+ input.type = 'file';
207
+ input.accept = 'image/*';
208
+
209
+ input.onchange = async (e) => {
210
+ const file = (e.target as HTMLInputElement).files?.[0];
211
+
212
+ if (file) {
213
+ const reader = new FileReader();
214
+
215
+ reader.onload = (e) => {
216
+ const base64Image = e.target?.result as string;
217
+ setUploadedFiles?.([...uploadedFiles, file]);
218
+ setImageDataList?.([...imageDataList, base64Image]);
219
+ };
220
+ reader.readAsDataURL(file);
221
+ }
222
+ };
223
+
224
+ input.click();
225
+ };
226
+
227
+ const handlePaste = async (e: React.ClipboardEvent) => {
228
+ const items = e.clipboardData?.items;
229
+
230
+ if (!items) {
231
+ return;
232
+ }
233
+
234
+ for (const item of items) {
235
+ if (item.type.startsWith('image/')) {
236
+ e.preventDefault();
237
+
238
+ const file = item.getAsFile();
239
+
240
+ if (file) {
241
+ const reader = new FileReader();
242
+
243
+ reader.onload = (e) => {
244
+ const base64Image = e.target?.result as string;
245
+ setUploadedFiles?.([...uploadedFiles, file]);
246
+ setImageDataList?.([...imageDataList, base64Image]);
247
+ };
248
+ reader.readAsDataURL(file);
249
+ }
250
+
251
+ break;
252
+ }
253
+ }
254
+ };
255
+
256
  const baseChat = (
257
  <div
258
  ref={ref}
 
369
  )}
370
  </div>
371
  </div>
372
+ <FilePreview
373
+ files={uploadedFiles}
374
+ imageDataList={imageDataList}
375
+ onRemove={(index) => {
376
+ setUploadedFiles?.(uploadedFiles.filter((_, i) => i !== index));
377
+ setImageDataList?.(imageDataList.filter((_, i) => i !== index));
378
+ }}
379
+ />
380
  <div
381
  className={classNames(
382
  'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
 
384
  >
385
  <textarea
386
  ref={textareaRef}
387
+ className={classNames(
388
+ 'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm',
389
+ 'transition-all duration-200',
390
+ 'hover:border-bolt-elements-focus',
391
+ )}
392
+ onDragEnter={(e) => {
393
+ e.preventDefault();
394
+ e.currentTarget.style.border = '2px solid #1488fc';
395
+ }}
396
+ onDragOver={(e) => {
397
+ e.preventDefault();
398
+ e.currentTarget.style.border = '2px solid #1488fc';
399
+ }}
400
+ onDragLeave={(e) => {
401
+ e.preventDefault();
402
+ e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
403
+ }}
404
+ onDrop={(e) => {
405
+ e.preventDefault();
406
+ e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
407
+
408
+ const files = Array.from(e.dataTransfer.files);
409
+ files.forEach((file) => {
410
+ if (file.type.startsWith('image/')) {
411
+ const reader = new FileReader();
412
+
413
+ reader.onload = (e) => {
414
+ const base64Image = e.target?.result as string;
415
+ setUploadedFiles?.([...uploadedFiles, file]);
416
+ setImageDataList?.([...imageDataList, base64Image]);
417
+ };
418
+ reader.readAsDataURL(file);
419
+ }
420
+ });
421
+ }}
422
  onKeyDown={(event) => {
423
  if (event.key === 'Enter') {
424
  if (event.shiftKey) {
 
427
 
428
  event.preventDefault();
429
 
430
+ if (isStreaming) {
431
+ handleStop?.();
432
+ return;
433
+ }
434
+
435
+ handleSendMessage?.(event);
436
  }
437
  }}
438
  value={input}
439
  onChange={(event) => {
440
  handleInputChange?.(event);
441
  }}
442
+ onPaste={handlePaste}
443
  style={{
444
  minHeight: TEXTAREA_MIN_HEIGHT,
445
  maxHeight: TEXTAREA_MAX_HEIGHT,
 
450
  <ClientOnly>
451
  {() => (
452
  <SendButton
453
+ show={input.length > 0 || isStreaming || uploadedFiles.length > 0}
454
  isStreaming={isStreaming}
455
  onClick={(event) => {
456
  if (isStreaming) {
 
458
  return;
459
  }
460
 
461
+ if (input.length > 0 || uploadedFiles.length > 0) {
462
+ handleSendMessage?.(event);
463
+ }
464
  }}
465
  />
466
  )}
467
  </ClientOnly>
468
  <div className="flex justify-between items-center text-sm p-4 pt-2">
469
  <div className="flex gap-1 items-center">
470
+ <IconButton title="Upload file" className="transition-all" onClick={() => handleFileUpload()}>
471
+ <div className="i-ph:paperclip text-xl"></div>
472
+ </IconButton>
473
  <IconButton
474
  title="Enhance prompt"
475
  disabled={input.length === 0 || enhancingPrompt}
476
+ className={classNames(
477
+ 'transition-all',
478
+ enhancingPrompt ? 'opacity-100' : '',
479
+ promptEnhanced ? 'text-bolt-elements-item-contentAccent' : '',
480
+ promptEnhanced ? 'pr-1.5' : '',
481
+ promptEnhanced ? 'enabled:hover:bg-bolt-elements-item-backgroundAccent' : '',
482
+ )}
483
  onClick={() => enhancePrompt?.()}
484
  >
485
  {enhancingPrompt ? (
 
494
  </>
495
  )}
496
  </IconButton>
497
+
498
+ <SpeechRecognitionButton
499
+ isListening={isListening}
500
+ onStart={startListening}
501
+ onStop={stopListening}
502
+ disabled={isStreaming}
503
+ />
504
  {chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
505
  </div>
506
  {input.length > 3 ? (
 
520
  <GitCloneButton importChat={importChat} />
521
  </div>
522
  )}
523
+ {!chatStarted &&
524
+ ExamplePrompts((event, messageInput) => {
525
+ if (isStreaming) {
526
+ handleStop?.();
527
+ return;
528
+ }
529
+
530
+ handleSendMessage?.(event, messageInput);
531
+ })}
532
  </div>
533
  <ClientOnly>{() => <Workbench chatStarted={chatStarted} isStreaming={isStreaming} />}</ClientOnly>
534
  </div>
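For readers skimming this hunk: the file-picker, paste, and drag-and-drop paths added to BaseChat above all funnel through the same pattern, read the image File into a base64 data URL with FileReader, then push both the File and the data URL into component state. A minimal standalone sketch of that pattern (illustrative only, not part of the commit; the setter names mirror the new props):

```ts
// Sketch: read an image File into a base64 data URL and append it to state.
// uploadedFiles / imageDataList and their setters correspond to the props in the diff above.
function addImage(
  file: File,
  uploadedFiles: File[],
  imageDataList: string[],
  setUploadedFiles?: (files: File[]) => void,
  setImageDataList?: (dataList: string[]) => void,
): void {
  if (!file.type.startsWith('image/')) {
    return; // only images are previewed and attached
  }

  const reader = new FileReader();

  reader.onload = (e) => {
    // e.target.result is a data URL such as "data:image/png;base64,..."
    const base64Image = e.target?.result as string;
    setUploadedFiles?.([...uploadedFiles, file]);
    setImageDataList?.([...imageDataList, base64Image]);
  };

  reader.readAsDataURL(file);
}
```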
app/components/chat/Chat.client.tsx CHANGED
@@ -12,7 +12,6 @@ import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from
12
  import { description, useChatHistory } from '~/lib/persistence';
13
  import { chatStore } from '~/lib/stores/chat';
14
  import { workbenchStore } from '~/lib/stores/workbench';
15
- import { fileModificationsToHTML } from '~/utils/diff';
16
  import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
17
  import { cubicEasingFn } from '~/utils/easings';
18
  import { createScopedLogger, renderLogger } from '~/utils/logger';
@@ -89,8 +88,10 @@ export const ChatImpl = memo(
89
  useShortcuts();
90
 
91
  const textareaRef = useRef<HTMLTextAreaElement>(null);
92
-
93
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
94
  const [model, setModel] = useState(() => {
95
  const savedModel = Cookies.get('selectedModel');
96
  return savedModel || DEFAULT_MODEL;
@@ -206,8 +207,6 @@ export const ChatImpl = memo(
206
  runAnimation();
207
 
208
  if (fileModifications !== undefined) {
209
- const diff = fileModificationsToHTML(fileModifications);
210
-
211
  /**
212
  * If we have file modifications we append a new user message manually since we have to prefix
213
  * the user input with the file modifications and we don't want the new user input to appear
@@ -215,7 +214,19 @@ export const ChatImpl = memo(
215
  * manually reset the input and we'd have to manually pass in file attachments. However, those
216
  * aren't relevant here.
217
  */
218
- append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${diff}\n\n${_input}` });
219
 
220
  /**
221
  * After sending a new message we reset all modifications since the model
@@ -223,12 +234,28 @@ export const ChatImpl = memo(
223
  */
224
  workbenchStore.resetAllFileModifications();
225
  } else {
226
- append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}` });
227
  }
228
 
229
  setInput('');
230
  Cookies.remove(PROMPT_COOKIE_KEY);
231
 
232
  resetEnhancer();
233
 
234
  textareaRef.current?.blur();
@@ -321,6 +348,10 @@ export const ChatImpl = memo(
321
  apiKeys,
322
  );
323
  }}
324
  />
325
  );
326
  },
 
12
  import { description, useChatHistory } from '~/lib/persistence';
13
  import { chatStore } from '~/lib/stores/chat';
14
  import { workbenchStore } from '~/lib/stores/workbench';
 
15
  import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
16
  import { cubicEasingFn } from '~/utils/easings';
17
  import { createScopedLogger, renderLogger } from '~/utils/logger';
 
88
  useShortcuts();
89
 
90
  const textareaRef = useRef<HTMLTextAreaElement>(null);
 
91
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
92
+ const [uploadedFiles, setUploadedFiles] = useState<File[]>([]); // Move here
93
+ const [imageDataList, setImageDataList] = useState<string[]>([]); // Move here
94
+
95
  const [model, setModel] = useState(() => {
96
  const savedModel = Cookies.get('selectedModel');
97
  return savedModel || DEFAULT_MODEL;
 
207
  runAnimation();
208
 
209
  if (fileModifications !== undefined) {
 
 
210
  /**
211
  * If we have file modifications we append a new user message manually since we have to prefix
212
  * the user input with the file modifications and we don't want the new user input to appear
 
214
  * manually reset the input and we'd have to manually pass in file attachments. However, those
215
  * aren't relevant here.
216
  */
217
+ append({
218
+ role: 'user',
219
+ content: [
220
+ {
221
+ type: 'text',
222
+ text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
223
+ },
224
+ ...imageDataList.map((imageData) => ({
225
+ type: 'image',
226
+ image: imageData,
227
+ })),
228
+ ] as any, // Type assertion to bypass compiler check
229
+ });
230
 
231
  /**
232
  * After sending a new message we reset all modifications since the model
 
234
  */
235
  workbenchStore.resetAllFileModifications();
236
  } else {
237
+ append({
238
+ role: 'user',
239
+ content: [
240
+ {
241
+ type: 'text',
242
+ text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${_input}`,
243
+ },
244
+ ...imageDataList.map((imageData) => ({
245
+ type: 'image',
246
+ image: imageData,
247
+ })),
248
+ ] as any, // Type assertion to bypass compiler check
249
+ });
250
  }
251
 
252
  setInput('');
253
  Cookies.remove(PROMPT_COOKIE_KEY);
254
 
255
+ // Add file cleanup here
256
+ setUploadedFiles([]);
257
+ setImageDataList([]);
258
+
259
  resetEnhancer();
260
 
261
  textareaRef.current?.blur();
 
348
  apiKeys,
349
  );
350
  }}
351
+ uploadedFiles={uploadedFiles}
352
+ setUploadedFiles={setUploadedFiles}
353
+ imageDataList={imageDataList}
354
+ setImageDataList={setImageDataList}
355
  />
356
  );
357
  },
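The two `append` calls added above switch the user message from a plain string to a multimodal content array. A minimal sketch of the shape being built (illustrative; the commit itself casts the array with `as any` because the SDK's `Message` type still expects a string):

```ts
// Sketch: one text part carrying the model/provider header, followed by
// one image part per attached base64 data URL.
type UserContentPart = { type: 'text'; text: string } | { type: 'image'; image: string };

function buildUserContent(
  model: string,
  providerName: string,
  input: string,
  imageDataList: string[],
): UserContentPart[] {
  return [
    { type: 'text', text: `[Model: ${model}]\n\n[Provider: ${providerName}]\n\n${input}` },
    ...imageDataList.map((imageData): UserContentPart => ({ type: 'image', image: imageData })),
  ];
}
```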
app/components/chat/FilePreview.tsx ADDED
@@ -0,0 +1,35 @@
+import React from 'react';
+
+interface FilePreviewProps {
+  files: File[];
+  imageDataList: string[];
+  onRemove: (index: number) => void;
+}
+
+const FilePreview: React.FC<FilePreviewProps> = ({ files, imageDataList, onRemove }) => {
+  if (!files || files.length === 0) {
+    return null;
+  }
+
+  return (
+    <div className="flex flex-row overflow-x-auto -mt-2">
+      {files.map((file, index) => (
+        <div key={file.name + file.size} className="mr-2 relative">
+          {imageDataList[index] && (
+            <div className="relative pt-4 pr-4">
+              <img src={imageDataList[index]} alt={file.name} className="max-h-20" />
+              <button
+                onClick={() => onRemove(index)}
+                className="absolute top-1 right-1 z-10 bg-black rounded-full w-5 h-5 shadow-md hover:bg-gray-900 transition-colors flex items-center justify-center"
+              >
+                <div className="i-ph:x w-3 h-3 text-gray-200" />
+              </button>
+            </div>
+          )}
+        </div>
+      ))}
+    </div>
+  );
+};
+
+export default FilePreview;
app/components/chat/ImportFolderButton.tsx CHANGED
@@ -79,7 +79,7 @@ ${content}
       role: 'assistant',
       content: `I'll help you set up these files.${binaryFilesMessage}

-<boltArtifact id="imported-files" title="Imported Files">
+<boltArtifact id="imported-files" title="Imported Files" type="bundled">
 ${fileArtifacts.join('\n\n')}
 </boltArtifact>`,
       id: generateId(),
app/components/chat/ModelSelector.tsx ADDED
@@ -0,0 +1,63 @@
+import type { ProviderInfo } from '~/types/model';
+import type { ModelInfo } from '~/utils/types';
+
+interface ModelSelectorProps {
+  model?: string;
+  setModel?: (model: string) => void;
+  provider?: ProviderInfo;
+  setProvider?: (provider: ProviderInfo) => void;
+  modelList: ModelInfo[];
+  providerList: ProviderInfo[];
+  apiKeys: Record<string, string>;
+}
+
+export const ModelSelector = ({
+  model,
+  setModel,
+  provider,
+  setProvider,
+  modelList,
+  providerList,
+}: ModelSelectorProps) => {
+  return (
+    <div className="mb-2 flex gap-2 flex-col sm:flex-row">
+      <select
+        value={provider?.name ?? ''}
+        onChange={(e) => {
+          const newProvider = providerList.find((p: ProviderInfo) => p.name === e.target.value);
+
+          if (newProvider && setProvider) {
+            setProvider(newProvider);
+          }
+
+          const firstModel = [...modelList].find((m) => m.provider === e.target.value);
+
+          if (firstModel && setModel) {
+            setModel(firstModel.name);
+          }
+        }}
+        className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
+      >
+        {providerList.map((provider: ProviderInfo) => (
+          <option key={provider.name} value={provider.name}>
+            {provider.name}
+          </option>
+        ))}
+      </select>
+      <select
+        key={provider?.name}
+        value={model}
+        onChange={(e) => setModel?.(e.target.value)}
+        className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
+      >
+        {[...modelList]
+          .filter((e) => e.provider == provider?.name && e.name)
+          .map((modelOption) => (
+            <option key={modelOption.name} value={modelOption.name}>
+              {modelOption.label}
+            </option>
+          ))}
+      </select>
+    </div>
+  );
+};
app/components/chat/SendButton.client.tsx CHANGED
@@ -4,11 +4,12 @@ interface SendButtonProps {
   show: boolean;
   isStreaming?: boolean;
   onClick?: (event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
+  onImagesSelected?: (images: File[]) => void;
 }

 const customEasingFn = cubicBezier(0.4, 0, 0.2, 1);

-export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
+export const SendButton = ({ show, isStreaming, onClick }: SendButtonProps) => {
   return (
     <AnimatePresence>
       {show ? (
@@ -30,4 +31,4 @@ export function SendButton({ show, isStreaming, onClick }: SendButtonProps) {
       ) : null}
     </AnimatePresence>
   );
-}
+};
app/components/chat/SpeechRecognition.tsx ADDED
@@ -0,0 +1,28 @@
+import { IconButton } from '~/components/ui/IconButton';
+import { classNames } from '~/utils/classNames';
+import React from 'react';
+
+export const SpeechRecognitionButton = ({
+  isListening,
+  onStart,
+  onStop,
+  disabled,
+}: {
+  isListening: boolean;
+  onStart: () => void;
+  onStop: () => void;
+  disabled: boolean;
+}) => {
+  return (
+    <IconButton
+      title={isListening ? 'Stop listening' : 'Start speech recognition'}
+      disabled={disabled}
+      className={classNames('transition-all', {
+        'text-bolt-elements-item-contentAccent': isListening,
+      })}
+      onClick={isListening ? onStop : onStart}
+    >
+      {isListening ? <div className="i-ph:microphone-slash text-xl" /> : <div className="i-ph:microphone text-xl" />}
+    </IconButton>
+  );
+};
app/components/chat/UserMessage.tsx CHANGED
@@ -2,26 +2,52 @@
2
  * @ts-nocheck
3
  * Preventing TS checks with files presented in the video for a better presentation.
4
  */
5
- import { modificationsRegex } from '~/utils/diff';
6
  import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
7
  import { Markdown } from './Markdown';
8
 
9
  interface UserMessageProps {
10
- content: string;
11
  }
12
 
13
  export function UserMessage({ content }: UserMessageProps) {
14
  return (
15
  <div className="overflow-hidden pt-[4px]">
16
- <Markdown limitedMarkdown>{sanitizeUserMessage(content)}</Markdown>
17
  </div>
18
  );
19
  }
20
 
21
  function sanitizeUserMessage(content: string) {
22
- return content
23
- .replace(modificationsRegex, '')
24
- .replace(MODEL_REGEX, 'Using: $1')
25
- .replace(PROVIDER_REGEX, ' ($1)\n\n')
26
- .trim();
27
  }
 
2
  * @ts-nocheck
3
  * Preventing TS checks with files presented in the video for a better presentation.
4
  */
 
5
  import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
6
  import { Markdown } from './Markdown';
7
 
8
  interface UserMessageProps {
9
+ content: string | Array<{ type: string; text?: string; image?: string }>;
10
  }
11
 
12
  export function UserMessage({ content }: UserMessageProps) {
13
+ if (Array.isArray(content)) {
14
+ const textItem = content.find((item) => item.type === 'text');
15
+ const textContent = sanitizeUserMessage(textItem?.text || '');
16
+ const images = content.filter((item) => item.type === 'image' && item.image);
17
+
18
+ return (
19
+ <div className="overflow-hidden pt-[4px]">
20
+ <div className="flex items-start gap-4">
21
+ <div className="flex-1">
22
+ <Markdown limitedMarkdown>{textContent}</Markdown>
23
+ </div>
24
+ {images.length > 0 && (
25
+ <div className="flex-shrink-0 w-[160px]">
26
+ {images.map((item, index) => (
27
+ <div key={index} className="relative">
28
+ <img
29
+ src={item.image}
30
+ alt={`Uploaded image ${index + 1}`}
31
+ className="w-full h-[160px] rounded-lg object-cover border border-bolt-elements-borderColor"
32
+ />
33
+ </div>
34
+ ))}
35
+ </div>
36
+ )}
37
+ </div>
38
+ </div>
39
+ );
40
+ }
41
+
42
+ const textContent = sanitizeUserMessage(content);
43
+
44
  return (
45
  <div className="overflow-hidden pt-[4px]">
46
+ <Markdown limitedMarkdown>{textContent}</Markdown>
47
  </div>
48
  );
49
  }
50
 
51
  function sanitizeUserMessage(content: string) {
52
+ return content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
53
  }
app/components/sidebar/Menu.client.tsx CHANGED
@@ -33,7 +33,7 @@ const menuVariants = {

 type DialogContent = { type: 'delete'; item: ChatHistoryItem } | null;

-export function Menu() {
+export const Menu = () => {
   const { duplicateCurrentChat, exportChat } = useChatHistory();
   const menuRef = useRef<HTMLDivElement>(null);
   const [list, setList] = useState<ChatHistoryItem[]>([]);
@@ -206,4 +206,4 @@ export function Menu() {
       </div>
     </motion.div>
   );
-}
+};
app/components/workbench/Preview.tsx CHANGED
@@ -4,11 +4,16 @@ import { IconButton } from '~/components/ui/IconButton';
4
  import { workbenchStore } from '~/lib/stores/workbench';
5
  import { PortDropdown } from './PortDropdown';
6
 
 
 
7
  export const Preview = memo(() => {
8
  const iframeRef = useRef<HTMLIFrameElement>(null);
 
9
  const inputRef = useRef<HTMLInputElement>(null);
 
10
  const [activePreviewIndex, setActivePreviewIndex] = useState(0);
11
  const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
 
12
  const hasSelectedPreview = useRef(false);
13
  const previews = useStore(workbenchStore.previews);
14
  const activePreview = previews[activePreviewIndex];
@@ -16,6 +21,23 @@ export const Preview = memo(() => {
16
  const [url, setUrl] = useState('');
17
  const [iframeUrl, setIframeUrl] = useState<string | undefined>();
19
  useEffect(() => {
20
  if (!activePreview) {
21
  setUrl('');
@@ -25,10 +47,9 @@ export const Preview = memo(() => {
25
  }
26
 
27
  const { baseUrl } = activePreview;
28
-
29
  setUrl(baseUrl);
30
  setIframeUrl(baseUrl);
31
- }, [activePreview, iframeUrl]);
32
 
33
  const validateUrl = useCallback(
34
  (value: string) => {
@@ -56,14 +77,13 @@ export const Preview = memo(() => {
56
  [],
57
  );
58
 
59
- // when previews change, display the lowest port if user hasn't selected a preview
60
  useEffect(() => {
61
  if (previews.length > 1 && !hasSelectedPreview.current) {
62
  const minPortIndex = previews.reduce(findMinPortIndex, 0);
63
-
64
  setActivePreviewIndex(minPortIndex);
65
  }
66
- }, [previews]);
67
 
68
  const reloadPreview = () => {
69
  if (iframeRef.current) {
@@ -71,13 +91,134 @@ export const Preview = memo(() => {
71
  }
72
  };
74
  return (
75
- <div className="w-full h-full flex flex-col">
76
  {isPortDropdownOpen && (
77
  <div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
78
  )}
79
  <div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-1.5">
80
  <IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
 
81
  <div
82
  className="flex items-center gap-1 flex-grow bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive
83
  focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive"
@@ -101,6 +242,7 @@ export const Preview = memo(() => {
101
  }}
102
  />
103
  </div>
 
104
  {previews.length > 1 && (
105
  <PortDropdown
106
  activePreviewIndex={activePreviewIndex}
@@ -111,13 +253,93 @@ export const Preview = memo(() => {
111
  previews={previews}
112
  />
113
  )}
114
  </div>
115
- <div className="flex-1 border-t border-bolt-elements-borderColor">
116
- {activePreview ? (
117
- <iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} />
118
- ) : (
119
- <div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
120
- )}
121
  </div>
122
  </div>
123
  );
 
4
  import { workbenchStore } from '~/lib/stores/workbench';
5
  import { PortDropdown } from './PortDropdown';
6
 
7
+ type ResizeSide = 'left' | 'right' | null;
8
+
9
  export const Preview = memo(() => {
10
  const iframeRef = useRef<HTMLIFrameElement>(null);
11
+ const containerRef = useRef<HTMLDivElement>(null);
12
  const inputRef = useRef<HTMLInputElement>(null);
13
+
14
  const [activePreviewIndex, setActivePreviewIndex] = useState(0);
15
  const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
16
+ const [isFullscreen, setIsFullscreen] = useState(false);
17
  const hasSelectedPreview = useRef(false);
18
  const previews = useStore(workbenchStore.previews);
19
  const activePreview = previews[activePreviewIndex];
 
21
  const [url, setUrl] = useState('');
22
  const [iframeUrl, setIframeUrl] = useState<string | undefined>();
23
 
24
+ // Toggle between responsive mode and device mode
25
+ const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
26
+
27
+ // Use percentage for width
28
+ const [widthPercent, setWidthPercent] = useState<number>(37.5); // 375px assuming 1000px window width initially
29
+
30
+ const resizingState = useRef({
31
+ isResizing: false,
32
+ side: null as ResizeSide,
33
+ startX: 0,
34
+ startWidthPercent: 37.5,
35
+ windowWidth: window.innerWidth,
36
+ });
37
+
38
+ // Define the scaling factor
39
+ const SCALING_FACTOR = 2; // Adjust this value to increase/decrease sensitivity
40
+
41
  useEffect(() => {
42
  if (!activePreview) {
43
  setUrl('');
 
47
  }
48
 
49
  const { baseUrl } = activePreview;
 
50
  setUrl(baseUrl);
51
  setIframeUrl(baseUrl);
52
+ }, [activePreview]);
53
 
54
  const validateUrl = useCallback(
55
  (value: string) => {
 
77
  [],
78
  );
79
 
80
+ // When previews change, display the lowest port if user hasn't selected a preview
81
  useEffect(() => {
82
  if (previews.length > 1 && !hasSelectedPreview.current) {
83
  const minPortIndex = previews.reduce(findMinPortIndex, 0);
 
84
  setActivePreviewIndex(minPortIndex);
85
  }
86
+ }, [previews, findMinPortIndex]);
87
 
88
  const reloadPreview = () => {
89
  if (iframeRef.current) {
 
91
  }
92
  };
93
 
94
+ const toggleFullscreen = async () => {
95
+ if (!isFullscreen && containerRef.current) {
96
+ await containerRef.current.requestFullscreen();
97
+ } else if (document.fullscreenElement) {
98
+ await document.exitFullscreen();
99
+ }
100
+ };
101
+
102
+ useEffect(() => {
103
+ const handleFullscreenChange = () => {
104
+ setIsFullscreen(!!document.fullscreenElement);
105
+ };
106
+
107
+ document.addEventListener('fullscreenchange', handleFullscreenChange);
108
+
109
+ return () => {
110
+ document.removeEventListener('fullscreenchange', handleFullscreenChange);
111
+ };
112
+ }, []);
113
+
114
+ const toggleDeviceMode = () => {
115
+ setIsDeviceModeOn((prev) => !prev);
116
+ };
117
+
118
+ const startResizing = (e: React.MouseEvent, side: ResizeSide) => {
119
+ if (!isDeviceModeOn) {
120
+ return;
121
+ }
122
+
123
+ // Prevent text selection
124
+ document.body.style.userSelect = 'none';
125
+
126
+ resizingState.current.isResizing = true;
127
+ resizingState.current.side = side;
128
+ resizingState.current.startX = e.clientX;
129
+ resizingState.current.startWidthPercent = widthPercent;
130
+ resizingState.current.windowWidth = window.innerWidth;
131
+
132
+ document.addEventListener('mousemove', onMouseMove);
133
+ document.addEventListener('mouseup', onMouseUp);
134
+
135
+ e.preventDefault(); // Prevent any text selection on mousedown
136
+ };
137
+
138
+ const onMouseMove = (e: MouseEvent) => {
139
+ if (!resizingState.current.isResizing) {
140
+ return;
141
+ }
142
+
143
+ const dx = e.clientX - resizingState.current.startX;
144
+ const windowWidth = resizingState.current.windowWidth;
145
+
146
+ // Apply scaling factor to increase sensitivity
147
+ const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
148
+
149
+ let newWidthPercent = resizingState.current.startWidthPercent;
150
+
151
+ if (resizingState.current.side === 'right') {
152
+ newWidthPercent = resizingState.current.startWidthPercent + dxPercent;
153
+ } else if (resizingState.current.side === 'left') {
154
+ newWidthPercent = resizingState.current.startWidthPercent - dxPercent;
155
+ }
156
+
157
+ // Clamp the width between 10% and 90%
158
+ newWidthPercent = Math.max(10, Math.min(newWidthPercent, 90));
159
+
160
+ setWidthPercent(newWidthPercent);
161
+ };
162
+
163
+ const onMouseUp = () => {
164
+ resizingState.current.isResizing = false;
165
+ resizingState.current.side = null;
166
+ document.removeEventListener('mousemove', onMouseMove);
167
+ document.removeEventListener('mouseup', onMouseUp);
168
+
169
+ // Restore text selection
170
+ document.body.style.userSelect = '';
171
+ };
172
+
173
+ // Handle window resize to ensure widthPercent remains valid
174
+ useEffect(() => {
175
+ const handleWindowResize = () => {
176
+ /*
177
+ * Optional: Adjust widthPercent if necessary
178
+ * For now, since widthPercent is relative, no action is needed
179
+ */
180
+ };
181
+
182
+ window.addEventListener('resize', handleWindowResize);
183
+
184
+ return () => {
185
+ window.removeEventListener('resize', handleWindowResize);
186
+ };
187
+ }, []);
188
+
189
+ // A small helper component for the handle's "grip" icon
190
+ const GripIcon = () => (
191
+ <div
192
+ style={{
193
+ display: 'flex',
194
+ justifyContent: 'center',
195
+ alignItems: 'center',
196
+ height: '100%',
197
+ pointerEvents: 'none',
198
+ }}
199
+ >
200
+ <div
201
+ style={{
202
+ color: 'rgba(0,0,0,0.5)',
203
+ fontSize: '10px',
204
+ lineHeight: '5px',
205
+ userSelect: 'none',
206
+ marginLeft: '1px',
207
+ }}
208
+ >
209
+ ••• •••
210
+ </div>
211
+ </div>
212
+ );
213
+
214
  return (
215
+ <div ref={containerRef} className="w-full h-full flex flex-col relative">
216
  {isPortDropdownOpen && (
217
  <div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
218
  )}
219
  <div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-1.5">
220
  <IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
221
+
222
  <div
223
  className="flex items-center gap-1 flex-grow bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive
224
  focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive"
 
242
  }}
243
  />
244
  </div>
245
+
246
  {previews.length > 1 && (
247
  <PortDropdown
248
  activePreviewIndex={activePreviewIndex}
 
253
  previews={previews}
254
  />
255
  )}
256
+
257
+ {/* Device mode toggle button */}
258
+ <IconButton
259
+ icon="i-ph:devices"
260
+ onClick={toggleDeviceMode}
261
+ title={isDeviceModeOn ? 'Switch to Responsive Mode' : 'Switch to Device Mode'}
262
+ />
263
+
264
+ {/* Fullscreen toggle button */}
265
+ <IconButton
266
+ icon={isFullscreen ? 'i-ph:arrows-in' : 'i-ph:arrows-out'}
267
+ onClick={toggleFullscreen}
268
+ title={isFullscreen ? 'Exit Full Screen' : 'Full Screen'}
269
+ />
270
  </div>
271
+
272
+ <div className="flex-1 border-t border-bolt-elements-borderColor flex justify-center items-center overflow-auto">
273
+ <div
274
+ style={{
275
+ width: isDeviceModeOn ? `${widthPercent}%` : '100%',
276
+ height: '100%', // Always full height
277
+ overflow: 'visible',
278
+ background: '#fff',
279
+ position: 'relative',
280
+ display: 'flex',
281
+ }}
282
+ >
283
+ {activePreview ? (
284
+ <iframe ref={iframeRef} className="border-none w-full h-full bg-white" src={iframeUrl} allowFullScreen />
285
+ ) : (
286
+ <div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
287
+ )}
288
+
289
+ {isDeviceModeOn && (
290
+ <>
291
+ {/* Left handle */}
292
+ <div
293
+ onMouseDown={(e) => startResizing(e, 'left')}
294
+ style={{
295
+ position: 'absolute',
296
+ top: 0,
297
+ left: 0,
298
+ width: '15px',
299
+ marginLeft: '-15px',
300
+ height: '100%',
301
+ cursor: 'ew-resize',
302
+ background: 'rgba(255,255,255,.2)',
303
+ display: 'flex',
304
+ alignItems: 'center',
305
+ justifyContent: 'center',
306
+ transition: 'background 0.2s',
307
+ userSelect: 'none',
308
+ }}
309
+ onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
310
+ onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
311
+ title="Drag to resize width"
312
+ >
313
+ <GripIcon />
314
+ </div>
315
+
316
+ {/* Right handle */}
317
+ <div
318
+ onMouseDown={(e) => startResizing(e, 'right')}
319
+ style={{
320
+ position: 'absolute',
321
+ top: 0,
322
+ right: 0,
323
+ width: '15px',
324
+ marginRight: '-15px',
325
+ height: '100%',
326
+ cursor: 'ew-resize',
327
+ background: 'rgba(255,255,255,.2)',
328
+ display: 'flex',
329
+ alignItems: 'center',
330
+ justifyContent: 'center',
331
+ transition: 'background 0.2s',
332
+ userSelect: 'none',
333
+ }}
334
+ onMouseOver={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.5)')}
335
+ onMouseOut={(e) => (e.currentTarget.style.background = 'rgba(255,255,255,.2)')}
336
+ title="Drag to resize width"
337
+ >
338
+ <GripIcon />
339
+ </div>
340
+ </>
341
+ )}
342
+ </div>
343
  </div>
344
  </div>
345
  );
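The drag handles added to Preview above reduce to one small calculation in `onMouseMove`: convert the horizontal mouse delta into a percentage of the window width, amplify it by `SCALING_FACTOR`, and clamp the result. A standalone restatement of that math (illustrative, not additional commit code):

```ts
// Sketch of the width math behind the device-mode resize handles.
const SCALING_FACTOR = 2; // same constant as in the diff; higher values make the drag more sensitive

function nextWidthPercent(
  startWidthPercent: number,
  startX: number,
  clientX: number,
  windowWidth: number,
  side: 'left' | 'right',
): number {
  const dx = clientX - startX;
  const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
  const raw = side === 'right' ? startWidthPercent + dxPercent : startWidthPercent - dxPercent;

  // Keep the preview frame between 10% and 90% of the window width.
  return Math.max(10, Math.min(raw, 90));
}
```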
app/lib/.server/llm/api-key.ts CHANGED
@@ -51,7 +51,7 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
 export function getBaseURL(cloudflareEnv: Env, provider: string) {
   switch (provider) {
     case 'Together':
-      return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL;
+      return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL || 'https://api.together.xyz/v1';
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
app/lib/.server/llm/model.ts CHANGED
@@ -128,7 +128,12 @@ export function getXAIModel(apiKey: OptionalApiKey, model: string) {
 }

 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
-  const apiKey = getAPIKey(env, provider, apiKeys);
+  /*
+   * let apiKey; // Declare first
+   * let baseURL;
+   */
+
+  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
   const baseURL = getBaseURL(env, provider);

   switch (provider) {
app/lib/.server/llm/stream-text.ts CHANGED
@@ -1,11 +1,8 @@
1
- // eslint-disable-next-line @typescript-eslint/ban-ts-comment
2
- // @ts-nocheck – TODO: Provider proper types
3
-
4
  import { convertToCoreMessages, streamText as _streamText } from 'ai';
5
  import { getModel } from '~/lib/.server/llm/model';
6
  import { MAX_TOKENS } from './constants';
7
  import { getSystemPrompt } from './prompts';
8
- import { DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_LIST, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
9
 
10
  interface ToolResult<Name extends string, Args, Result> {
11
  toolCallId: string;
@@ -26,24 +23,50 @@ export type Messages = Message[];
26
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
27
 
28
  function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
29
- // Extract model
30
- const modelMatch = message.content.match(MODEL_REGEX);
31
- const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
32
 
33
- // Extract provider
34
- const providerMatch = message.content.match(PROVIDER_REGEX);
35
- const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
36
 
37
- // Remove model and provider lines from content
38
- const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
39
 
40
  return { model, provider, content: cleanedContent };
41
  }
42
 
43
- export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
44
  let currentModel = DEFAULT_MODEL;
45
- let currentProvider = DEFAULT_PROVIDER;
46
-
47
  const processedMessages = messages.map((message) => {
48
  if (message.role === 'user') {
49
  const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -65,10 +88,10 @@ export function streamText(messages: Messages, env: Env, options?: StreamingOpti
65
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
66
 
67
  return _streamText({
68
- model: getModel(currentProvider, currentModel, env, apiKeys),
69
  system: getSystemPrompt(),
70
  maxTokens: dynamicMaxTokens,
71
- messages: convertToCoreMessages(processedMessages),
72
  ...options,
73
  });
74
  }
 
 
 
 
1
  import { convertToCoreMessages, streamText as _streamText } from 'ai';
2
  import { getModel } from '~/lib/.server/llm/model';
3
  import { MAX_TOKENS } from './constants';
4
  import { getSystemPrompt } from './prompts';
5
+ import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
6
 
7
  interface ToolResult<Name extends string, Args, Result> {
8
  toolCallId: string;
 
23
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
24
 
25
  function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
26
+ const textContent = Array.isArray(message.content)
27
+ ? message.content.find((item) => item.type === 'text')?.text || ''
28
+ : message.content;
29
 
30
+ const modelMatch = textContent.match(MODEL_REGEX);
31
+ const providerMatch = textContent.match(PROVIDER_REGEX);
 
32
 
33
+ /*
34
+ * Extract model
35
+ * const modelMatch = message.content.match(MODEL_REGEX);
36
+ */
37
+ const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
38
+
39
+ /*
40
+ * Extract provider
41
+ * const providerMatch = message.content.match(PROVIDER_REGEX);
42
+ */
43
+ const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER.name;
44
+
45
+ const cleanedContent = Array.isArray(message.content)
46
+ ? message.content.map((item) => {
47
+ if (item.type === 'text') {
48
+ return {
49
+ type: 'text',
50
+ text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
51
+ };
52
+ }
53
+
54
+ return item; // Preserve image_url and other types as is
55
+ })
56
+ : textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
57
 
58
  return { model, provider, content: cleanedContent };
59
  }
60
 
61
+ export async function streamText(
62
+ messages: Messages,
63
+ env: Env,
64
+ options?: StreamingOptions,
65
+ apiKeys?: Record<string, string>,
66
+ ) {
67
  let currentModel = DEFAULT_MODEL;
68
+ let currentProvider = DEFAULT_PROVIDER.name;
69
+ const MODEL_LIST = await getModelList(apiKeys || {});
70
  const processedMessages = messages.map((message) => {
71
  if (message.role === 'user') {
72
  const { model, provider, content } = extractPropertiesFromMessage(message);
 
88
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
89
 
90
  return _streamText({
91
+ model: getModel(currentProvider, currentModel, env, apiKeys) as any,
92
  system: getSystemPrompt(),
93
  maxTokens: dynamicMaxTokens,
94
+ messages: convertToCoreMessages(processedMessages as any),
95
  ...options,
96
  });
97
  }
app/lib/runtime/__snapshots__/message-parser.spec.ts.snap CHANGED
@@ -29,6 +29,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -37,6 +38,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -96,6 +98,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -104,6 +107,7 @@ exports[`StreamingMessageParser > valid artifacts with actions > should correctl
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -112,6 +116,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -120,6 +125,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -128,6 +134,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": "bundled",
  }
  `;

@@ -136,6 +143,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": "bundled",
  }
  `;

@@ -144,6 +152,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -152,6 +161,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -160,6 +170,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -168,6 +179,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -176,6 +188,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -184,6 +197,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -192,6 +206,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -200,6 +215,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -208,6 +224,7 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;

@@ -216,5 +233,6 @@ exports[`StreamingMessageParser > valid artifacts without actions > should corre
  "id": "artifact_1",
  "messageId": "message_1",
  "title": "Some title",
+ "type": undefined,
  }
  `;
app/lib/runtime/message-parser.spec.ts CHANGED
@@ -59,7 +59,11 @@ describe('StreamingMessageParser', () => {
      },
    ],
    [
-     ['Some text before <boltArti', 'fact', ' title="Some title" id="artifact_1">foo</boltArtifact> Some more text'],
+     [
+       'Some text before <boltArti',
+       'fact',
+       ' title="Some title" id="artifact_1" type="bundled" >foo</boltArtifact> Some more text',
+     ],
      {
        output: 'Some text before Some more text',
        callbacks: { onArtifactOpen: 1, onArtifactClose: 1, onActionOpen: 0, onActionClose: 0 },
app/lib/runtime/message-parser.ts CHANGED
@@ -192,6 +192,7 @@ export class StreamingMessageParser {
        const artifactTag = input.slice(i, openTagEnd + 1);

        const artifactTitle = this.#extractAttribute(artifactTag, 'title') as string;
+       const type = this.#extractAttribute(artifactTag, 'type') as string;
        const artifactId = this.#extractAttribute(artifactTag, 'id') as string;

        if (!artifactTitle) {
@@ -207,6 +208,7 @@
        const currentArtifact = {
          id: artifactId,
          title: artifactTitle,
+         type,
        } satisfies BoltArtifactData;

        state.currentArtifact = currentArtifact;
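
A quick illustration of what the new `type` attribute does (sketch only, assuming the parser's usual `{ callbacks }` constructor and `parse(messageId, input)` API; the input string mirrors the new spec case above):

```ts
import { StreamingMessageParser } from '~/lib/runtime/message-parser';

const parser = new StreamingMessageParser({
  callbacks: {
    onArtifactOpen: (data) => {
      // `data` now carries the optional `type` alongside id/title/messageId.
      console.log(data.type); // 'bundled' for the tag below, undefined when the attribute is omitted
    },
  },
});

parser.parse(
  'message_1',
  '<boltArtifact id="artifact_1" title="Some title" type="bundled">foo</boltArtifact>',
);
```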
app/lib/stores/files.ts CHANGED
@@ -212,9 +212,5 @@ function isBinaryFile(buffer: Uint8Array | undefined) {
   * array buffer.
   */
  function convertToBuffer(view: Uint8Array): Buffer {
-   const buffer = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
-
-   Object.setPrototypeOf(buffer, Buffer.prototype);
-
-   return buffer as Buffer;
+   return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
  }
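
The replacement above is behaviour-preserving: `Buffer.from(arrayBuffer, byteOffset, length)` wraps the same memory that the manual prototype swap used to. A small standalone check (illustrative only, not part of the commit):

```ts
import { Buffer } from 'node:buffer';

const view = new Uint8Array([1, 2, 3, 4]).subarray(1, 3); // byteOffset = 1, length = 2
const buf = Buffer.from(view.buffer, view.byteOffset, view.byteLength);

console.log(buf); // <Buffer 02 03>
console.log(buf.buffer === view.buffer); // true – same ArrayBuffer, no copy
```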
app/lib/stores/workbench.ts CHANGED
@@ -19,6 +19,7 @@ import { description } from '~/lib/persistence';
  export interface ArtifactState {
    id: string;
    title: string;
+   type?: string;
    closed: boolean;
    runner: ActionRunner;
  }
@@ -230,7 +231,7 @@ export class WorkbenchStore {
      // TODO: what do we wanna do and how do we wanna recover from this?
    }

-   addArtifact({ messageId, title, id }: ArtifactCallbackData) {
+   addArtifact({ messageId, title, id, type }: ArtifactCallbackData) {
      const artifact = this.#getArtifact(messageId);

      if (artifact) {
@@ -245,6 +246,7 @@
        id,
        title,
        closed: false,
+       type,
        runner: new ActionRunner(webcontainer, () => this.boltTerminal),
      });
    }
app/routes/api.chat.ts CHANGED
@@ -1,6 +1,3 @@
- // eslint-disable-next-line @typescript-eslint/ban-ts-comment
- // @ts-nocheck – TODO: Provider proper types
-
  import { type ActionFunctionArgs } from '@remix-run/cloudflare';
  import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
  import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
@@ -11,8 +8,8 @@ export async function action(args: ActionFunctionArgs) {
    return chatAction(args);
  }

- function parseCookies(cookieHeader) {
-   const cookies = {};
+ function parseCookies(cookieHeader: string) {
+   const cookies: any = {};

    // Split the cookie string by semicolons and spaces
    const items = cookieHeader.split(';').map((cookie) => cookie.trim());
@@ -34,19 +31,19 @@ function parseCookies(cookieHeader) {
  async function chatAction({ context, request }: ActionFunctionArgs) {
    const { messages } = await request.json<{
      messages: Messages;
+     model: string;
    }>();

    const cookieHeader = request.headers.get('Cookie');

    // Parse the cookie's value (returns an object or null if no cookie exists)
-   const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');
+   const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');

    const stream = new SwitchableStream();

    try {
      const options: StreamingOptions = {
        toolChoice: 'none',
-       apiKeys,
        onFinish: async ({ text: content, finishReason }) => {
          if (finishReason !== 'length') {
            return stream.close();
@@ -63,7 +60,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
          messages.push({ role: 'assistant', content });
          messages.push({ role: 'user', content: CONTINUE_PROMPT });

-         const result = await streamText(messages, context.cloudflare.env, options);
+         const result = await streamText(messages, context.cloudflare.env, options, apiKeys);

          return stream.switchSource(result.toAIStream());
        },
@@ -79,7 +76,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
        contentType: 'text/plain; charset=utf-8',
      },
    });
- } catch (error) {
+ } catch (error: any) {
    console.log(error);

    if (error.message?.includes('API key')) {
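
The route now depends on an `apiKeys` cookie holding a JSON map of provider keys. Only part of `parseCookies` is visible in this hunk, so the sketch below approximates the round trip; the cookie name and the `apiKeys` shape match the diff, while the helper body and the key value are illustrative:

```ts
// Approximate sketch of the cookie round trip used by chatAction (illustrative only).
function parseCookies(cookieHeader: string) {
  const cookies: Record<string, string> = {};

  // Split the cookie string by semicolons and spaces, then take name=value pairs.
  for (const item of cookieHeader.split(';').map((cookie) => cookie.trim())) {
    const [name, ...rest] = item.split('=');

    if (name) {
      cookies[name.trim()] = decodeURIComponent(rest.join('=').trim());
    }
  }

  return cookies;
}

const cookieHeader = 'apiKeys=' + encodeURIComponent(JSON.stringify({ Together: 'tg-demo-key' }));
const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');

console.log(apiKeys.Together); // 'tg-demo-key'
```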
app/types/artifact.ts CHANGED
@@ -1,4 +1,5 @@
  export interface BoltArtifactData {
    id: string;
    title: string;
+   type?: string | undefined;
  }
app/types/global.d.ts CHANGED
@@ -1,3 +1,5 @@
  interface Window {
    showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
+   webkitSpeechRecognition: typeof SpeechRecognition;
+   SpeechRecognition: typeof SpeechRecognition;
  }
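
These Window augmentations exist because Chromium still ships the Web Speech API under a `webkit` prefix. A hedged sketch of how voice-prompting code can pick whichever constructor is available (the actual UI wiring is not part of this hunk):

```ts
const SpeechRecognitionImpl = window.SpeechRecognition || window.webkitSpeechRecognition;

if (SpeechRecognitionImpl) {
  const recognition = new SpeechRecognitionImpl();
  recognition.continuous = true;
  recognition.interimResults = true;

  recognition.onresult = (event) => {
    // Concatenate the top transcript of every result seen so far.
    const transcript = Array.from(event.results)
      .map((result) => result[0].transcript)
      .join('');
    console.log(transcript);
  };

  recognition.start();
}
```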
app/types/model.ts CHANGED
@@ -3,7 +3,7 @@ import type { ModelInfo } from '~/utils/types';
  export type ProviderInfo = {
    staticModels: ModelInfo[];
    name: string;
-   getDynamicModels?: () => Promise<ModelInfo[]>;
+   getDynamicModels?: (apiKeys?: Record<string, string>) => Promise<ModelInfo[]>;
    getApiKeyLink?: string;
    labelForGetApiKey?: string;
    icon?: string;
app/utils/constants.ts CHANGED
@@ -1,3 +1,4 @@
+ import Cookies from 'js-cookie';
  import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
  import type { ProviderInfo } from '~/types/model';

@@ -262,6 +263,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
  },
  {
    name: 'Together',
+   getDynamicModels: getTogetherModels,
    staticModels: [
      {
        name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
@@ -293,6 +295,61 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(

  export let MODEL_LIST: ModelInfo[] = [...staticModels];

+ export async function getModelList(apiKeys: Record<string, string>) {
+   MODEL_LIST = [
+     ...(
+       await Promise.all(
+         PROVIDER_LIST.filter(
+           (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
+         ).map((p) => p.getDynamicModels(apiKeys)),
+       )
+     ).flat(),
+     ...staticModels,
+   ];
+   return MODEL_LIST;
+ }
+
+ async function getTogetherModels(apiKeys?: Record<string, string>): Promise<ModelInfo[]> {
+   try {
+     const baseUrl = import.meta.env.TOGETHER_API_BASE_URL || '';
+     const provider = 'Together';
+
+     if (!baseUrl) {
+       return [];
+     }
+
+     let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
+
+     if (apiKeys && apiKeys[provider]) {
+       apiKey = apiKeys[provider];
+     }
+
+     if (!apiKey) {
+       return [];
+     }
+
+     const response = await fetch(`${baseUrl}/models`, {
+       headers: {
+         Authorization: `Bearer ${apiKey}`,
+       },
+     });
+     const res = (await response.json()) as any;
+     const data: any[] = (res || []).filter((model: any) => model.type == 'chat');
+
+     return data.map((m: any) => ({
+       name: m.id,
+       label: `${m.display_name} - in:$${m.pricing.input.toFixed(
+         2,
+       )} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
+       provider,
+       maxTokenAllowed: 8000,
+     }));
+   } catch (e) {
+     console.error('Error getting OpenAILike models:', e);
+     return [];
+   }
+ }
+
  const getOllamaBaseUrl = () => {
    const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

@@ -340,7 +397,14 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
    return [];
  }

- const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
+ let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
+
+ const apikeys = JSON.parse(Cookies.get('apiKeys') || '{}');
+
+ if (apikeys && apikeys.OpenAILike) {
+   apiKey = apikeys.OpenAILike;
+ }
+
  const response = await fetch(`${baseUrl}/models`, {
    headers: {
      Authorization: `Bearer ${apiKey}`,
@@ -414,16 +478,32 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
  }

  async function initializeModelList(): Promise<ModelInfo[]> {
+   let apiKeys: Record<string, string> = {};
+
+   try {
+     const storedApiKeys = Cookies.get('apiKeys');
+
+     if (storedApiKeys) {
+       const parsedKeys = JSON.parse(storedApiKeys);
+
+       if (typeof parsedKeys === 'object' && parsedKeys !== null) {
+         apiKeys = parsedKeys;
+       }
+     }
+   } catch (error: any) {
+     console.warn(`Failed to fetch apikeys from cookies:${error?.message}`);
+   }
    MODEL_LIST = [
      ...(
        await Promise.all(
          PROVIDER_LIST.filter(
            (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
-         ).map((p) => p.getDynamicModels()),
+         ).map((p) => p.getDynamicModels(apiKeys)),
        )
      ).flat(),
      ...staticModels,
    ];
+
    return MODEL_LIST;
  }

package.json CHANGED
@@ -102,6 +102,7 @@
    "@cloudflare/workers-types": "^4.20241127.0",
    "@remix-run/dev": "^2.15.0",
    "@types/diff": "^5.2.3",
+   "@types/dom-speech-recognition": "^0.0.4",
    "@types/file-saver": "^2.0.7",
    "@types/js-cookie": "^3.0.6",
    "@types/react": "^18.3.12",
pnpm-lock.yaml CHANGED
@@ -225,6 +225,9 @@ importers:
      '@types/diff':
        specifier: ^5.2.3
        version: 5.2.3
+     '@types/dom-speech-recognition':
+       specifier: ^0.0.4
+       version: 0.0.4
      '@types/file-saver':
        specifier: ^2.0.7
        version: 2.0.7
@@ -2058,6 +2061,12 @@ packages:
  '@types/[email protected]':
    resolution: {integrity: sha512-K0Oqlrq3kQMaO2RhfrNQX5trmt+XLyom88zS0u84nnIcLvFnRUMRRHmrGny5GSM+kNO9IZLARsdQHDzkhAgmrQ==}

+ '@types/[email protected]':
+   resolution: {integrity: sha512-zf2GwV/G6TdaLwpLDcGTIkHnXf8JEf/viMux+khqKQKDa8/8BAUtXXZS563GnvJ4Fg0PBLGAaFf2GekEVSZ6GQ==}
+
+ '@types/[email protected]':
+   resolution: {integrity: sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==}
+
  '@types/[email protected]':
    resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==}

@@ -7485,6 +7494,15 @@ snapshots:

  '@types/[email protected]': {}

+
+ '@types/[email protected]': {}
+
+ '@types/[email protected]':
+   dependencies:
+     '@types/estree': 1.0.6
+     '@types/json-schema': 7.0.15
+   optional: true
+
  '@types/[email protected]':
    dependencies:
      '@types/estree': 1.0.6
@@ -7821,7 +7839,7 @@ snapshots:
      '@babel/plugin-syntax-typescript': 7.25.9(@babel/[email protected])
      '@vanilla-extract/babel-plugin-debug-ids': 1.1.0
      '@vanilla-extract/css': 1.16.1
-     esbuild: 0.17.6
+     esbuild: 0.17.19
      eval: 0.1.8
      find-up: 5.0.0
      javascript-stringify: 2.1.0
tsconfig.json CHANGED
@@ -1,7 +1,7 @@
  {
    "compilerOptions": {
      "lib": ["DOM", "DOM.Iterable", "ESNext"],
-     "types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01"],
+     "types": ["@remix-run/cloudflare", "vite/client", "@cloudflare/workers-types/2023-07-01", "@types/dom-speech-recognition"],
      "isolatedModules": true,
      "esModuleInterop": true,
      "jsx": "react-jsx",
vite.config.ts CHANGED
@@ -19,8 +19,7 @@ export default defineConfig((config) => {
      future: {
        v3_fetcherPersist: true,
        v3_relativeSplatPath: true,
-       v3_throwAbortReason: true,
-       v3_lazyRouteDiscovery: true,
+       v3_throwAbortReason: true
      },
    }),
    UnoCSS(),
@@ -28,7 +27,7 @@ export default defineConfig((config) => {
    chrome129IssuePlugin(),
    config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
  ],
- envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"],
+ envPrefix: ["VITE_", "OPENAI_LIKE_API_", "OLLAMA_API_BASE_URL", "LMSTUDIO_API_BASE_URL","TOGETHER_API_BASE_URL"],
  css: {
    preprocessorOptions: {
      scss: {