Anon committed on
Commit
c35211f
·
unverified ·
2 Parent(s): bb0546d ce6b65e

Merge branch 'main' into claude-new-sonnet-and-haiku

Browse files
.gitignore CHANGED
@@ -31,3 +31,4 @@ dist-ssr
31
  _worker.bundle
32
 
33
  Modelfile
 
 
31
  _worker.bundle
32
 
33
  Modelfile
34
+ modelfiles
README.md CHANGED
@@ -85,7 +85,7 @@ If you see usr/local/bin in the output then you're good to go.
85
  git clone https://github.com/coleam00/bolt.new-any-llm.git
86
  ```
87
 
88
- 3. Rename .env.example to .env and add your LLM API keys. You will find this file on a Mac at "[your name]/bold.new-any-llm/.env.example". For Windows and Linux the path will be similar.
89
 
90
  ![image](https://github.com/user-attachments/assets/7e6a532c-2268-401f-8310-e8d20c731328)
91
 
@@ -115,7 +115,7 @@ Optionally, you can set the debug level:
115
  VITE_LOG_LEVEL=debug
116
  ```
117
 
118
- **Important**: Never commit your `.env` file to version control. It's already included in .gitignore.
119
 
120
  ## Run with Docker
121
 
 
85
  git clone https://github.com/coleam00/bolt.new-any-llm.git
86
  ```
87
 
88
+ 3. Rename .env.example to .env.local and add your LLM API keys. You will find this file on a Mac at "[your name]/bolt.new-any-llm/.env.example". For Windows and Linux the path will be similar.
89
 
90
  ![image](https://github.com/user-attachments/assets/7e6a532c-2268-401f-8310-e8d20c731328)
91
 
 
115
  VITE_LOG_LEVEL=debug
116
  ```
117
 
118
+ **Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
119
 
120
  ## Run with Docker
121
 
app/components/chat/APIKeyManager.tsx ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import React, { useState } from 'react';
2
+ import { IconButton } from '~/components/ui/IconButton';
3
+
4
+ interface APIKeyManagerProps {
5
+ provider: string;
6
+ apiKey: string;
7
+ setApiKey: (key: string) => void;
8
+ }
9
+
10
+ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
11
+ const [isEditing, setIsEditing] = useState(false);
12
+ const [tempKey, setTempKey] = useState(apiKey);
13
+
14
+ const handleSave = () => {
15
+ setApiKey(tempKey);
16
+ setIsEditing(false);
17
+ };
18
+
19
+ return (
20
+ <div className="flex items-center gap-2 mt-2 mb-2">
21
+ <span className="text-sm text-bolt-elements-textSecondary">{provider} API Key:</span>
22
+ {isEditing ? (
23
+ <>
24
+ <input
25
+ type="password"
26
+ value={tempKey}
27
+ onChange={(e) => setTempKey(e.target.value)}
28
+ className="flex-1 p-1 text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
29
+ />
30
+ <IconButton onClick={handleSave} title="Save API Key">
31
+ <div className="i-ph:check" />
32
+ </IconButton>
33
+ <IconButton onClick={() => setIsEditing(false)} title="Cancel">
34
+ <div className="i-ph:x" />
35
+ </IconButton>
36
+ </>
37
+ ) : (
38
+ <>
39
+ <span className="flex-1 text-sm text-bolt-elements-textPrimary">
40
+ {apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'}
41
+ </span>
42
+ <IconButton onClick={() => setIsEditing(true)} title="Edit API Key">
43
+ <div className="i-ph:pencil-simple" />
44
+ </IconButton>
45
+ </>
46
+ )}
47
+ </div>
48
+ );
49
+ };
app/components/chat/BaseChat.tsx CHANGED
@@ -1,7 +1,7 @@
1
  // @ts-nocheck
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import type { Message } from 'ai';
4
- import React, { type RefCallback } from 'react';
5
  import { ClientOnly } from 'remix-utils/client-only';
6
  import { Menu } from '~/components/sidebar/Menu.client';
7
  import { IconButton } from '~/components/ui/IconButton';
@@ -11,6 +11,8 @@ import { MODEL_LIST, DEFAULT_PROVIDER } from '~/utils/constants';
11
  import { Messages } from './Messages.client';
12
  import { SendButton } from './SendButton.client';
13
  import { useState } from 'react';
 
 
14
 
15
  import styles from './BaseChat.module.scss';
16
 
@@ -24,18 +26,17 @@ const EXAMPLE_PROMPTS = [
24
 
25
  const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
26
 
27
- const ModelSelector = ({ model, setModel, modelList, providerList }) => {
28
- const [provider, setProvider] = useState(DEFAULT_PROVIDER);
29
  return (
30
- <div className="mb-2">
31
- <select
32
  value={provider}
33
  onChange={(e) => {
34
  setProvider(e.target.value);
35
  const firstModel = [...modelList].find(m => m.provider == e.target.value);
36
  setModel(firstModel ? firstModel.name : '');
37
  }}
38
- className="w-full p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none"
39
  >
40
  {providerList.map((provider) => (
41
  <option key={provider} value={provider}>
@@ -52,7 +53,7 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
52
  <select
53
  value={model}
54
  onChange={(e) => setModel(e.target.value)}
55
- className="w-full p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none"
56
  >
57
  {[...modelList].filter(e => e.provider == provider && e.name).map((modelOption) => (
58
  <option key={modelOption.name} value={modelOption.name}>
@@ -79,6 +80,8 @@ interface BaseChatProps {
79
  input?: string;
80
  model: string;
81
  setModel: (model: string) => void;
 
 
82
  handleStop?: () => void;
83
  sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
84
  handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
@@ -100,6 +103,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
100
  input = '',
101
  model,
102
  setModel,
 
 
103
  sendMessage,
104
  handleInputChange,
105
  enhancePrompt,
@@ -108,6 +113,40 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
108
  ref,
109
  ) => {
110
  const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
 
112
  return (
113
  <div
@@ -122,11 +161,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
122
  <div ref={scrollRef} className="flex overflow-y-auto w-full h-full">
123
  <div className={classNames(styles.Chat, 'flex flex-col flex-grow min-w-[var(--chat-min-width)] h-full')}>
124
  {!chatStarted && (
125
- <div id="intro" className="mt-[26vh] max-w-chat mx-auto">
126
- <h1 className="text-5xl text-center font-bold text-bolt-elements-textPrimary mb-2">
127
  Where ideas begin
128
  </h1>
129
- <p className="mb-4 text-center text-bolt-elements-textSecondary">
130
  Bring ideas to life in seconds or get help on existing projects.
131
  </p>
132
  </div>
@@ -157,16 +196,23 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
157
  model={model}
158
  setModel={setModel}
159
  modelList={MODEL_LIST}
 
 
160
  providerList={providerList}
161
  />
 
 
 
 
 
162
  <div
163
  className={classNames(
164
- 'shadow-sm border border-bolt-elements-borderColor bg-bolt-elements-prompt-background backdrop-filter backdrop-blur-[8px] rounded-lg overflow-hidden',
165
  )}
166
  >
167
  <textarea
168
  ref={textareaRef}
169
- className={`w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-md text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent`}
170
  onKeyDown={(event) => {
171
  if (event.key === 'Enter') {
172
  if (event.shiftKey) {
@@ -205,12 +251,12 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
205
  />
206
  )}
207
  </ClientOnly>
208
- <div className="flex justify-between text-sm p-4 pt-2">
209
  <div className="flex gap-1 items-center">
210
  <IconButton
211
  title="Enhance prompt"
212
  disabled={input.length === 0 || enhancingPrompt}
213
- className={classNames({
214
  'opacity-100!': enhancingPrompt,
215
  'text-bolt-elements-item-contentAccent! pr-1.5 enabled:hover:bg-bolt-elements-item-backgroundAccent!':
216
  promptEnhanced,
@@ -219,7 +265,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
219
  >
220
  {enhancingPrompt ? (
221
  <>
222
- <div className="i-svg-spinners:90-ring-with-bg text-bolt-elements-loader-progress text-xl"></div>
223
  <div className="ml-1.5">Enhancing prompt...</div>
224
  </>
225
  ) : (
@@ -232,7 +278,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
232
  </div>
233
  {input.length > 3 ? (
234
  <div className="text-xs text-bolt-elements-textTertiary">
235
- Use <kbd className="kdb">Shift</kbd> + <kbd className="kdb">Return</kbd> for a new line
236
  </div>
237
  ) : null}
238
  </div>
@@ -266,4 +312,4 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
266
  </div>
267
  );
268
  },
269
- );
 
1
  // @ts-nocheck
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import type { Message } from 'ai';
4
+ import React, { type RefCallback, useEffect } from 'react';
5
  import { ClientOnly } from 'remix-utils/client-only';
6
  import { Menu } from '~/components/sidebar/Menu.client';
7
  import { IconButton } from '~/components/ui/IconButton';
 
11
  import { Messages } from './Messages.client';
12
  import { SendButton } from './SendButton.client';
13
  import { useState } from 'react';
14
+ import { APIKeyManager } from './APIKeyManager';
15
+ import Cookies from 'js-cookie';
16
 
17
  import styles from './BaseChat.module.scss';
18
 
 
26
 
27
  const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
28
 
29
+ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
 
30
  return (
31
+ <div className="mb-2 flex gap-2">
32
+ <select
33
  value={provider}
34
  onChange={(e) => {
35
  setProvider(e.target.value);
36
  const firstModel = [...modelList].find(m => m.provider == e.target.value);
37
  setModel(firstModel ? firstModel.name : '');
38
  }}
39
+ className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
40
  >
41
  {providerList.map((provider) => (
42
  <option key={provider} value={provider}>
 
53
  <select
54
  value={model}
55
  onChange={(e) => setModel(e.target.value)}
56
+ className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
57
  >
58
  {[...modelList].filter(e => e.provider == provider && e.name).map((modelOption) => (
59
  <option key={modelOption.name} value={modelOption.name}>
 
80
  input?: string;
81
  model: string;
82
  setModel: (model: string) => void;
83
+ provider: string;
84
+ setProvider: (provider: string) => void;
85
  handleStop?: () => void;
86
  sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
87
  handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
 
103
  input = '',
104
  model,
105
  setModel,
106
+ provider,
107
+ setProvider,
108
  sendMessage,
109
  handleInputChange,
110
  enhancePrompt,
 
113
  ref,
114
  ) => {
115
  const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
116
+ const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
117
+
118
+ useEffect(() => {
119
+ // Load API keys from cookies on component mount
120
+ try {
121
+ const storedApiKeys = Cookies.get('apiKeys');
122
+ if (storedApiKeys) {
123
+ const parsedKeys = JSON.parse(storedApiKeys);
124
+ if (typeof parsedKeys === 'object' && parsedKeys !== null) {
125
+ setApiKeys(parsedKeys);
126
+ }
127
+ }
128
+ } catch (error) {
129
+ console.error('Error loading API keys from cookies:', error);
130
+ // Clear invalid cookie data
131
+ Cookies.remove('apiKeys');
132
+ }
133
+ }, []);
134
+
135
+ const updateApiKey = (provider: string, key: string) => {
136
+ try {
137
+ const updatedApiKeys = { ...apiKeys, [provider]: key };
138
+ setApiKeys(updatedApiKeys);
139
+ // Save updated API keys to cookies with 30 day expiry and secure settings
140
+ Cookies.set('apiKeys', JSON.stringify(updatedApiKeys), {
141
+ expires: 30, // 30 days
142
+ secure: true, // Only send over HTTPS
143
+ sameSite: 'strict', // Protect against CSRF
144
+ path: '/' // Accessible across the site
145
+ });
146
+ } catch (error) {
147
+ console.error('Error saving API keys to cookies:', error);
148
+ }
149
+ };
150
 
151
  return (
152
  <div
 
161
  <div ref={scrollRef} className="flex overflow-y-auto w-full h-full">
162
  <div className={classNames(styles.Chat, 'flex flex-col flex-grow min-w-[var(--chat-min-width)] h-full')}>
163
  {!chatStarted && (
164
+ <div id="intro" className="mt-[26vh] max-w-chat mx-auto text-center">
165
+ <h1 className="text-6xl font-bold text-bolt-elements-textPrimary mb-4 animate-fade-in">
166
  Where ideas begin
167
  </h1>
168
+ <p className="text-xl mb-8 text-bolt-elements-textSecondary animate-fade-in animation-delay-200">
169
  Bring ideas to life in seconds or get help on existing projects.
170
  </p>
171
  </div>
 
196
  model={model}
197
  setModel={setModel}
198
  modelList={MODEL_LIST}
199
+ provider={provider}
200
+ setProvider={setProvider}
201
  providerList={providerList}
202
  />
203
+ <APIKeyManager
204
+ provider={provider}
205
+ apiKey={apiKeys[provider] || ''}
206
+ setApiKey={(key) => updateApiKey(provider, key)}
207
+ />
208
  <div
209
  className={classNames(
210
+ 'shadow-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background backdrop-filter backdrop-blur-[8px] rounded-lg overflow-hidden transition-all',
211
  )}
212
  >
213
  <textarea
214
  ref={textareaRef}
215
+ className={`w-full pl-4 pt-4 pr-16 focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus resize-none text-md text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent transition-all`}
216
  onKeyDown={(event) => {
217
  if (event.key === 'Enter') {
218
  if (event.shiftKey) {
 
251
  />
252
  )}
253
  </ClientOnly>
254
+ <div className="flex justify-between items-center text-sm p-4 pt-2">
255
  <div className="flex gap-1 items-center">
256
  <IconButton
257
  title="Enhance prompt"
258
  disabled={input.length === 0 || enhancingPrompt}
259
+ className={classNames('transition-all', {
260
  'opacity-100!': enhancingPrompt,
261
  'text-bolt-elements-item-contentAccent! pr-1.5 enabled:hover:bg-bolt-elements-item-backgroundAccent!':
262
  promptEnhanced,
 
265
  >
266
  {enhancingPrompt ? (
267
  <>
268
+ <div className="i-svg-spinners:90-ring-with-bg text-bolt-elements-loader-progress text-xl animate-spin"></div>
269
  <div className="ml-1.5">Enhancing prompt...</div>
270
  </>
271
  ) : (
 
278
  </div>
279
  {input.length > 3 ? (
280
  <div className="text-xs text-bolt-elements-textTertiary">
281
+ Use <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Shift</kbd> + <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Return</kbd> for a new line
282
  </div>
283
  ) : null}
284
  </div>
 
312
  </div>
313
  );
314
  },
315
+ );
app/components/chat/Chat.client.tsx CHANGED
@@ -11,10 +11,11 @@ import { useChatHistory } from '~/lib/persistence';
11
  import { chatStore } from '~/lib/stores/chat';
12
  import { workbenchStore } from '~/lib/stores/workbench';
13
  import { fileModificationsToHTML } from '~/utils/diff';
14
- import { DEFAULT_MODEL } from '~/utils/constants';
15
  import { cubicEasingFn } from '~/utils/easings';
16
  import { createScopedLogger, renderLogger } from '~/utils/logger';
17
  import { BaseChat } from './BaseChat';
 
18
 
19
  const toastAnimation = cssTransition({
20
  enter: 'animated fadeInRight',
@@ -74,13 +75,19 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
74
 
75
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
76
  const [model, setModel] = useState(DEFAULT_MODEL);
 
77
 
78
  const { showChat } = useStore(chatStore);
79
 
80
  const [animationScope, animate] = useAnimate();
81
 
 
 
82
  const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({
83
  api: '/api/chat',
 
 
 
84
  onError: (error) => {
85
  logger.error('Request failed\n\n', error);
86
  toast.error('There was an error processing your request');
@@ -182,7 +189,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
182
  * manually reset the input and we'd have to manually pass in file attachments. However, those
183
  * aren't relevant here.
184
  */
185
- append({ role: 'user', content: `[Model: ${model}]\n\n${diff}\n\n${_input}` });
186
 
187
  /**
188
  * After sending a new message we reset all modifications since the model
@@ -190,7 +197,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
190
  */
191
  workbenchStore.resetAllFileModifications();
192
  } else {
193
- append({ role: 'user', content: `[Model: ${model}]\n\n${_input}` });
194
  }
195
 
196
  setInput('');
@@ -202,6 +209,13 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
202
 
203
  const [messageRef, scrollRef] = useSnapScroll();
204
 
 
 
 
 
 
 
 
205
  return (
206
  <BaseChat
207
  ref={animationScope}
@@ -215,6 +229,8 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
215
  sendMessage={sendMessage}
216
  model={model}
217
  setModel={setModel}
 
 
218
  messageRef={messageRef}
219
  scrollRef={scrollRef}
220
  handleInputChange={handleInputChange}
 
11
  import { chatStore } from '~/lib/stores/chat';
12
  import { workbenchStore } from '~/lib/stores/workbench';
13
  import { fileModificationsToHTML } from '~/utils/diff';
14
+ import { DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
15
  import { cubicEasingFn } from '~/utils/easings';
16
  import { createScopedLogger, renderLogger } from '~/utils/logger';
17
  import { BaseChat } from './BaseChat';
18
+ import Cookies from 'js-cookie';
19
 
20
  const toastAnimation = cssTransition({
21
  enter: 'animated fadeInRight',
 
75
 
76
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
77
  const [model, setModel] = useState(DEFAULT_MODEL);
78
+ const [provider, setProvider] = useState(DEFAULT_PROVIDER);
79
 
80
  const { showChat } = useStore(chatStore);
81
 
82
  const [animationScope, animate] = useAnimate();
83
 
84
+ const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
85
+
86
  const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({
87
  api: '/api/chat',
88
+ body: {
89
+ apiKeys
90
+ },
91
  onError: (error) => {
92
  logger.error('Request failed\n\n', error);
93
  toast.error('There was an error processing your request');
 
189
  * manually reset the input and we'd have to manually pass in file attachments. However, those
190
  * aren't relevant here.
191
  */
192
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${diff}\n\n${_input}` });
193
 
194
  /**
195
  * After sending a new message we reset all modifications since the model
 
197
  */
198
  workbenchStore.resetAllFileModifications();
199
  } else {
200
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${_input}` });
201
  }
202
 
203
  setInput('');
 
209
 
210
  const [messageRef, scrollRef] = useSnapScroll();
211
 
212
+ useEffect(() => {
213
+ const storedApiKeys = Cookies.get('apiKeys');
214
+ if (storedApiKeys) {
215
+ setApiKeys(JSON.parse(storedApiKeys));
216
+ }
217
+ }, []);
218
+
219
  return (
220
  <BaseChat
221
  ref={animationScope}
 
229
  sendMessage={sendMessage}
230
  model={model}
231
  setModel={setModel}
232
+ provider={provider}
233
+ setProvider={setProvider}
234
  messageRef={messageRef}
235
  scrollRef={scrollRef}
236
  handleInputChange={handleInputChange}
app/components/chat/UserMessage.tsx CHANGED
@@ -1,7 +1,7 @@
1
  // @ts-nocheck
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import { modificationsRegex } from '~/utils/diff';
4
- import { MODEL_REGEX } from '~/utils/constants';
5
  import { Markdown } from './Markdown';
6
 
7
  interface UserMessageProps {
@@ -17,5 +17,5 @@ export function UserMessage({ content }: UserMessageProps) {
17
  }
18
 
19
  function sanitizeUserMessage(content: string) {
20
- return content.replace(modificationsRegex, '').replace(MODEL_REGEX, '').trim();
21
  }
 
1
  // @ts-nocheck
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import { modificationsRegex } from '~/utils/diff';
4
+ import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
5
  import { Markdown } from './Markdown';
6
 
7
  interface UserMessageProps {
 
17
  }
18
 
19
  function sanitizeUserMessage(content: string) {
20
+ return content.replace(modificationsRegex, '').replace(MODEL_REGEX, 'Using: $1').replace(PROVIDER_REGEX, ' ($1)\n\n').trim();
21
  }
app/lib/.server/llm/api-key.ts CHANGED
@@ -2,12 +2,18 @@
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import { env } from 'node:process';
4
 
5
- export function getAPIKey(cloudflareEnv: Env, provider: string) {
6
  /**
7
  * The `cloudflareEnv` is only used when deployed or when previewing locally.
8
  * In development the environment variables are available through `env`.
9
  */
10
 
 
 
 
 
 
 
11
  switch (provider) {
12
  case 'Anthropic':
13
  return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
 
2
  // Preventing TS checks with files presented in the video for a better presentation.
3
  import { env } from 'node:process';
4
 
5
+ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
6
  /**
7
  * The `cloudflareEnv` is only used when deployed or when previewing locally.
8
  * In development the environment variables are available through `env`.
9
  */
10
 
11
+ // First check user-provided API keys
12
+ if (userApiKeys?.[provider]) {
13
+ return userApiKeys[provider];
14
+ }
15
+
16
+ // Fall back to environment variables
17
  switch (provider) {
18
  case 'Anthropic':
19
  return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
app/lib/.server/llm/model.ts CHANGED
@@ -58,7 +58,10 @@ export function getGroqModel(apiKey: string, model: string) {
58
  }
59
 
60
  export function getOllamaModel(baseURL: string, model: string) {
61
- let Ollama = ollama(model);
 
 
 
62
  Ollama.config.baseURL = `${baseURL}/api`;
63
  return Ollama;
64
  }
@@ -88,9 +91,8 @@ export function getXAIModel(apiKey: string, model: string) {
88
 
89
  return openai(model);
90
  }
91
-
92
- export function getModel(provider: string, model: string, env: Env) {
93
- const apiKey = getAPIKey(env, provider);
94
  const baseURL = getBaseURL(env, provider);
95
 
96
  switch (provider) {
 
58
  }
59
 
60
  export function getOllamaModel(baseURL: string, model: string) {
61
+ let Ollama = ollama(model, {
62
+ numCtx: 32768,
63
+ });
64
+
65
  Ollama.config.baseURL = `${baseURL}/api`;
66
  return Ollama;
67
  }
 
91
 
92
  return openai(model);
93
  }
94
+ export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
95
+ const apiKey = getAPIKey(env, provider, apiKeys);
 
96
  const baseURL = getBaseURL(env, provider);
97
 
98
  switch (provider) {
app/lib/.server/llm/stream-text.ts CHANGED
@@ -4,7 +4,7 @@ import { streamText as _streamText, convertToCoreMessages } from 'ai';
4
  import { getModel } from '~/lib/.server/llm/model';
5
  import { MAX_TOKENS } from './constants';
6
  import { getSystemPrompt } from './prompts';
7
- import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
8
 
9
  interface ToolResult<Name extends string, Args, Result> {
10
  toolCallId: string;
@@ -24,42 +24,53 @@ export type Messages = Message[];
24
 
25
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
26
 
27
- function extractModelFromMessage(message: Message): { model: string; content: string } {
28
- const modelRegex = /^\[Model: (.*?)\]\n\n/;
29
- const match = message.content.match(modelRegex);
 
30
 
31
- if (match) {
32
- const model = match[1];
33
- const content = message.content.replace(modelRegex, '');
34
- return { model, content };
35
- }
36
 
37
- // Default model if not specified
38
- return { model: DEFAULT_MODEL, content: message.content };
 
 
 
 
 
39
  }
40
 
41
- export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
 
 
 
 
 
42
  let currentModel = DEFAULT_MODEL;
 
 
43
  const processedMessages = messages.map((message) => {
44
  if (message.role === 'user') {
45
- const { model, content } = extractModelFromMessage(message);
46
- if (model && MODEL_LIST.find((m) => m.name === model)) {
47
- currentModel = model; // Update the current model
 
48
  }
 
 
 
49
  return { ...message, content };
50
  }
51
- return message;
52
- });
53
 
54
- const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;
 
55
 
56
  return _streamText({
57
- model: getModel(provider, currentModel, env),
58
  system: getSystemPrompt(),
59
  maxTokens: MAX_TOKENS,
60
- // headers: {
61
- // 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
62
- // },
63
  messages: convertToCoreMessages(processedMessages),
64
  ...options,
65
  });
 
4
  import { getModel } from '~/lib/.server/llm/model';
5
  import { MAX_TOKENS } from './constants';
6
  import { getSystemPrompt } from './prompts';
7
+ import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
8
 
9
  interface ToolResult<Name extends string, Args, Result> {
10
  toolCallId: string;
 
24
 
25
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
26
 
27
+ function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
28
+ // Extract model
29
+ const modelMatch = message.content.match(MODEL_REGEX);
30
+ const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
31
 
32
+ // Extract provider
33
+ const providerMatch = message.content.match(PROVIDER_REGEX);
34
+ const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
 
 
35
 
36
+ // Remove model and provider lines from content
37
+ const cleanedContent = message.content
38
+ .replace(MODEL_REGEX, '')
39
+ .replace(PROVIDER_REGEX, '')
40
+ .trim();
41
+
42
+ return { model, provider, content: cleanedContent };
43
  }
44
 
45
+ export function streamText(
46
+ messages: Messages,
47
+ env: Env,
48
+ options?: StreamingOptions,
49
+ apiKeys?: Record<string, string>
50
+ ) {
51
  let currentModel = DEFAULT_MODEL;
52
+ let currentProvider = DEFAULT_PROVIDER;
53
+
54
  const processedMessages = messages.map((message) => {
55
  if (message.role === 'user') {
56
+ const { model, provider, content } = extractPropertiesFromMessage(message);
57
+
58
+ if (MODEL_LIST.find((m) => m.name === model)) {
59
+ currentModel = model;
60
  }
61
+
62
+ currentProvider = provider;
63
+
64
  return { ...message, content };
65
  }
 
 
66
 
67
+ return message; // No changes for non-user messages
68
+ });
69
 
70
  return _streamText({
71
+ model: getModel(currentProvider, currentModel, env, apiKeys),
72
  system: getSystemPrompt(),
73
  maxTokens: MAX_TOKENS,
 
 
 
74
  messages: convertToCoreMessages(processedMessages),
75
  ...options,
76
  });
app/routes/api.chat.ts CHANGED
@@ -11,13 +11,17 @@ export async function action(args: ActionFunctionArgs) {
11
  }
12
 
13
  async function chatAction({ context, request }: ActionFunctionArgs) {
14
- const { messages } = await request.json<{ messages: Messages }>();
 
 
 
15
 
16
  const stream = new SwitchableStream();
17
 
18
  try {
19
  const options: StreamingOptions = {
20
  toolChoice: 'none',
 
21
  onFinish: async ({ text: content, finishReason }) => {
22
  if (finishReason !== 'length') {
23
  return stream.close();
@@ -40,7 +44,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
40
  },
41
  };
42
 
43
- const result = await streamText(messages, context.cloudflare.env, options);
44
 
45
  stream.switchSource(result.toAIStream());
46
 
@@ -52,6 +56,13 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
52
  });
53
  } catch (error) {
54
  console.log(error);
 
 
 
 
 
 
 
55
 
56
  throw new Response(null, {
57
  status: 500,
 
11
  }
12
 
13
  async function chatAction({ context, request }: ActionFunctionArgs) {
14
+ const { messages, apiKeys } = await request.json<{
15
+ messages: Messages,
16
+ apiKeys: Record<string, string>
17
+ }>();
18
 
19
  const stream = new SwitchableStream();
20
 
21
  try {
22
  const options: StreamingOptions = {
23
  toolChoice: 'none',
24
+ apiKeys,
25
  onFinish: async ({ text: content, finishReason }) => {
26
  if (finishReason !== 'length') {
27
  return stream.close();
 
44
  },
45
  };
46
 
47
+ const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
48
 
49
  stream.switchSource(result.toAIStream());
50
 
 
56
  });
57
  } catch (error) {
58
  console.log(error);
59
+
60
+ if (error.message?.includes('API key')) {
61
+ throw new Response('Invalid or missing API key', {
62
+ status: 401,
63
+ statusText: 'Unauthorized'
64
+ });
65
+ }
66
 
67
  throw new Response(null, {
68
  status: 500,
app/utils/constants.ts CHANGED
@@ -4,6 +4,7 @@ export const WORK_DIR_NAME = 'project';
4
  export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
5
  export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
6
  export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
 
7
  export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
8
  export const DEFAULT_PROVIDER = 'Anthropic';
9
 
@@ -19,7 +20,7 @@ const staticModels: ModelInfo[] = [
19
  { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
20
  { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
21
  { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
22
- { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google'},
23
  { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
24
  { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
25
  { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
@@ -58,11 +59,11 @@ const getOllamaBaseUrl = () => {
58
  // Frontend always uses localhost
59
  return defaultBaseUrl;
60
  }
61
-
62
  // Backend: Check if we're running in Docker
63
  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
64
-
65
- return isDocker
66
  ? defaultBaseUrl.replace("localhost", "host.docker.internal")
67
  : defaultBaseUrl;
68
  };
@@ -84,32 +85,32 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
84
  }
85
 
86
  async function getOpenAILikeModels(): Promise<ModelInfo[]> {
87
- try {
88
- const base_url =import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
89
- if (!base_url) {
90
  return [];
91
- }
92
- const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
93
- const response = await fetch(`${base_url}/models`, {
94
- headers: {
95
- Authorization: `Bearer ${api_key}`,
96
- }
97
- });
98
  const res = await response.json() as any;
99
  return res.data.map((model: any) => ({
100
  name: model.id,
101
  label: model.id,
102
  provider: 'OpenAILike',
103
  }));
104
- }catch (e) {
105
- return []
106
- }
107
 
108
  }
109
  async function initializeModelList(): Promise<void> {
110
  const ollamaModels = await getOllamaModels();
111
  const openAiLikeModels = await getOpenAILikeModels();
112
- MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
113
  }
114
  initializeModelList().then();
115
- export { getOllamaModels, getOpenAILikeModels, initializeModelList };
 
4
  export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
5
  export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
6
  export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
7
+ export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
8
  export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
9
  export const DEFAULT_PROVIDER = 'Anthropic';
10
 
 
20
  { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
21
  { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
22
  { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
23
+ { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
24
  { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
25
  { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
26
  { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
 
59
  // Frontend always uses localhost
60
  return defaultBaseUrl;
61
  }
62
+
63
  // Backend: Check if we're running in Docker
64
  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
65
+
66
+ return isDocker
67
  ? defaultBaseUrl.replace("localhost", "host.docker.internal")
68
  : defaultBaseUrl;
69
  };
 
85
  }
86
 
87
  async function getOpenAILikeModels(): Promise<ModelInfo[]> {
88
+ try {
89
+ const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
90
+ if (!base_url) {
91
  return [];
92
+ }
93
+ const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
94
+ const response = await fetch(`${base_url}/models`, {
95
+ headers: {
96
+ Authorization: `Bearer ${api_key}`,
97
+ }
98
+ });
99
  const res = await response.json() as any;
100
  return res.data.map((model: any) => ({
101
  name: model.id,
102
  label: model.id,
103
  provider: 'OpenAILike',
104
  }));
105
+ } catch (e) {
106
+ return []
107
+ }
108
 
109
  }
110
  async function initializeModelList(): Promise<void> {
111
  const ollamaModels = await getOllamaModels();
112
  const openAiLikeModels = await getOpenAILikeModels();
113
+ MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
114
  }
115
  initializeModelList().then();
116
+ export { getOllamaModels, getOpenAILikeModels, initializeModelList };
package.json CHANGED
@@ -28,8 +28,8 @@
28
  "dependencies": {
29
  "@ai-sdk/anthropic": "^0.0.39",
30
  "@ai-sdk/google": "^0.0.52",
31
- "@ai-sdk/openai": "^0.0.66",
32
  "@ai-sdk/mistral": "^0.0.43",
 
33
  "@codemirror/autocomplete": "^6.17.0",
34
  "@codemirror/commands": "^6.6.0",
35
  "@codemirror/lang-cpp": "^6.0.2",
@@ -71,6 +71,7 @@
71
  "isbot": "^4.1.0",
72
  "istextorbinary": "^9.5.0",
73
  "jose": "^5.6.3",
 
74
  "jszip": "^3.10.1",
75
  "nanostores": "^0.10.3",
76
  "ollama-ai-provider": "^0.15.2",
@@ -94,6 +95,7 @@
94
  "@remix-run/dev": "^2.10.0",
95
  "@types/diff": "^5.2.1",
96
  "@types/file-saver": "^2.0.7",
 
97
  "@types/react": "^18.2.20",
98
  "@types/react-dom": "^18.2.7",
99
  "fast-glob": "^3.3.2",
 
28
  "dependencies": {
29
  "@ai-sdk/anthropic": "^0.0.39",
30
  "@ai-sdk/google": "^0.0.52",
 
31
  "@ai-sdk/mistral": "^0.0.43",
32
+ "@ai-sdk/openai": "^0.0.66",
33
  "@codemirror/autocomplete": "^6.17.0",
34
  "@codemirror/commands": "^6.6.0",
35
  "@codemirror/lang-cpp": "^6.0.2",
 
71
  "isbot": "^4.1.0",
72
  "istextorbinary": "^9.5.0",
73
  "jose": "^5.6.3",
74
+ "js-cookie": "^3.0.5",
75
  "jszip": "^3.10.1",
76
  "nanostores": "^0.10.3",
77
  "ollama-ai-provider": "^0.15.2",
 
95
  "@remix-run/dev": "^2.10.0",
96
  "@types/diff": "^5.2.1",
97
  "@types/file-saver": "^2.0.7",
98
+ "@types/js-cookie": "^3.0.6",
99
  "@types/react": "^18.2.20",
100
  "@types/react-dom": "^18.2.7",
101
  "fast-glob": "^3.3.2",
pnpm-lock.yaml CHANGED
@@ -146,6 +146,9 @@ importers:
146
  jose:
147
  specifier: ^5.6.3
148
  version: 5.6.3
 
 
 
149
  jszip:
150
  specifier: ^3.10.1
151
  version: 3.10.1
@@ -210,6 +213,9 @@ importers:
210
  '@types/file-saver':
211
  specifier: ^2.0.7
212
  version: 2.0.7
 
 
 
213
  '@types/react':
214
  specifier: ^18.2.20
215
  version: 18.3.3
@@ -1872,6 +1878,9 @@ packages:
1872
  '@types/[email protected]':
1873
  resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
1874
 
 
 
 
1875
  '@types/[email protected]':
1876
  resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
1877
 
@@ -3455,6 +3464,10 @@ packages:
3455
3456
  resolution: {integrity: sha512-1Jh//hEEwMhNYPDDLwXHa2ePWgWiFNNUadVmguAAw2IJ6sj9mNxV5tGXJNqlMkJAybF6Lgw1mISDxTePP/187g==}
3457
 
 
 
 
 
3458
3459
  resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
3460
 
@@ -7248,6 +7261,8 @@ snapshots:
7248
  dependencies:
7249
  '@types/unist': 3.0.2
7250
 
 
 
7251
  '@types/[email protected]': {}
7252
 
7253
  '@types/[email protected]':
@@ -9211,6 +9226,8 @@ snapshots:
9211
 
9212
9213
 
 
 
9214
9215
 
9216
 
146
  jose:
147
  specifier: ^5.6.3
148
  version: 5.6.3
149
+ js-cookie:
150
+ specifier: ^3.0.5
151
+ version: 3.0.5
152
  jszip:
153
  specifier: ^3.10.1
154
  version: 3.10.1
 
213
  '@types/file-saver':
214
  specifier: ^2.0.7
215
  version: 2.0.7
216
+ '@types/js-cookie':
217
+ specifier: ^3.0.6
218
+ version: 3.0.6
219
  '@types/react':
220
  specifier: ^18.2.20
221
  version: 18.3.3
 
1878
  '@types/[email protected]':
1879
  resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
1880
 
1881
+ '@types/[email protected]':
1882
+ resolution: {integrity: sha512-wkw9yd1kEXOPnvEeEV1Go1MmxtBJL0RR79aOTAApecWFVu7w0NNXNqhcWgvw2YgZDYadliXkl14pa3WXw5jlCQ==}
1883
+
1884
  '@types/[email protected]':
1885
  resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
1886
 
 
3464
3465
  resolution: {integrity: sha512-1Jh//hEEwMhNYPDDLwXHa2ePWgWiFNNUadVmguAAw2IJ6sj9mNxV5tGXJNqlMkJAybF6Lgw1mISDxTePP/187g==}
3466
 
3467
3468
+ resolution: {integrity: sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==}
3469
+ engines: {node: '>=14'}
3470
+
3471
3472
  resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
3473
 
 
7261
  dependencies:
7262
  '@types/unist': 3.0.2
7263
 
7264
+ '@types/[email protected]': {}
7265
+
7266
  '@types/[email protected]': {}
7267
 
7268
  '@types/[email protected]':
 
9226
 
9227
9228
 
9229
9230
+
9231
9232
 
9233