codacus committed on
Commit
9d0066a
·
2 Parent(s): 34a8a39 115dcbb

Merge remote-tracking branch 'origin/main' into bundle-artifact

Browse files
.env.example CHANGED
@@ -38,12 +38,18 @@ OLLAMA_API_BASE_URL=
38
  # You only need this environment variable set if you want to use OpenAI Like models
39
  OPENAI_LIKE_API_BASE_URL=
40
 
 
 
 
41
  # You only need this environment variable set if you want to use DeepSeek models through their API
42
  DEEPSEEK_API_KEY=
43
 
44
  # Get your OpenAI Like API Key
45
  OPENAI_LIKE_API_KEY=
46
 
 
 
 
47
  # Get your Mistral API Key by following these instructions -
48
  # https://console.mistral.ai/api-keys/
49
  # You only need this environment variable set if you want to use Mistral models
 
38
  # You only need this environment variable set if you want to use OpenAI Like models
39
  OPENAI_LIKE_API_BASE_URL=
40
 
41
+ # You only need this environment variable set if you want to use Together AI models
42
+ TOGETHER_API_BASE_URL=
43
+
44
  # You only need this environment variable set if you want to use DeepSeek models through their API
45
  DEEPSEEK_API_KEY=
46
 
47
  # Get your OpenAI Like API Key
48
  OPENAI_LIKE_API_KEY=
49
 
50
+ # Get your Together API Key
51
+ TOGETHER_API_KEY=
52
+
53
  # Get your Mistral API Key by following these instructions -
54
  # https://console.mistral.ai/api-keys/
55
  # You only need this environment variable set if you want to use Mistral models
.github/workflows/stale.yml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Mark Stale Issues and Pull Requests
2
+
3
+ on:
4
+ schedule:
5
+ - cron: '0 2 * * *' # Runs daily at 2:00 AM UTC
6
+ workflow_dispatch: # Allows manual triggering of the workflow
7
+
8
+ jobs:
9
+ stale:
10
+ runs-on: ubuntu-latest
11
+
12
+ steps:
13
+ - name: Mark stale issues and pull requests
14
+ uses: actions/stale@v8
15
+ with:
16
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
17
+ stale-issue-message: "This issue has been marked as stale due to inactivity. If no further activity occurs, it will be closed in 7 days."
18
+ stale-pr-message: "This pull request has been marked as stale due to inactivity. If no further activity occurs, it will be closed in 7 days."
19
+ days-before-stale: 14 # Number of days before marking an issue or PR as stale
20
+ days-before-close: 7 # Number of days after being marked stale before closing
21
+ stale-issue-label: "stale" # Label to apply to stale issues
22
+ stale-pr-label: "stale" # Label to apply to stale pull requests
23
+ exempt-issue-labels: "pinned,important" # Issues with these labels won't be marked stale
24
+ exempt-pr-labels: "pinned,important" # PRs with these labels won't be marked stale
25
+ operations-per-run: 90 # Limits the number of actions per run to avoid API rate limits
.gitignore CHANGED
@@ -27,6 +27,7 @@ dist-ssr
27
  /build
28
  .env.local
29
  .env
 
30
  *.vars
31
  .wrangler
32
  _worker.bundle
 
27
  /build
28
  .env.local
29
  .env
30
+ .dev.vars
31
  *.vars
32
  .wrangler
33
  _worker.bundle
Dockerfile CHANGED
@@ -25,6 +25,8 @@ ARG ANTHROPIC_API_KEY
25
  ARG OPEN_ROUTER_API_KEY
26
  ARG GOOGLE_GENERATIVE_AI_API_KEY
27
  ARG OLLAMA_API_BASE_URL
 
 
28
  ARG VITE_LOG_LEVEL=debug
29
  ARG DEFAULT_NUM_CTX
30
 
@@ -36,6 +38,8 @@ ENV WRANGLER_SEND_METRICS=false \
36
  OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
37
  GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
38
  OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
 
 
39
  VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
40
  DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
41
 
@@ -58,6 +62,8 @@ ARG ANTHROPIC_API_KEY
58
  ARG OPEN_ROUTER_API_KEY
59
  ARG GOOGLE_GENERATIVE_AI_API_KEY
60
  ARG OLLAMA_API_BASE_URL
 
 
61
  ARG VITE_LOG_LEVEL=debug
62
  ARG DEFAULT_NUM_CTX
63
 
@@ -68,6 +74,8 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
68
  OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
69
  GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
70
  OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
 
 
71
  VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
72
  DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
73
 
 
25
  ARG OPEN_ROUTER_API_KEY
26
  ARG GOOGLE_GENERATIVE_AI_API_KEY
27
  ARG OLLAMA_API_BASE_URL
28
+ ARG TOGETHER_API_KEY
29
+ ARG TOGETHER_API_BASE_URL
30
  ARG VITE_LOG_LEVEL=debug
31
  ARG DEFAULT_NUM_CTX
32
 
 
38
  OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
39
  GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
40
  OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
41
+ TOGETHER_API_KEY=${TOGETHER_API_KEY} \
42
+ TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
43
  VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
44
  DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
45
 
 
62
  ARG OPEN_ROUTER_API_KEY
63
  ARG GOOGLE_GENERATIVE_AI_API_KEY
64
  ARG OLLAMA_API_BASE_URL
65
+ ARG TOGETHER_API_KEY
66
+ ARG TOGETHER_API_BASE_URL
67
  ARG VITE_LOG_LEVEL=debug
68
  ARG DEFAULT_NUM_CTX
69
 
 
74
  OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
75
  GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
76
  OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
77
+ TOGETHER_API_KEY=${TOGETHER_API_KEY} \
78
+ TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
79
  VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
80
  DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
81
 
FAQ.md ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [![Bolt.new: AI-Powered Full-Stack Web Development in the Browser](./public/social_preview_index.jpg)](https://bolt.new)
2
+
3
+ # Bolt.new Fork by Cole Medin - oTToDev
4
+
5
+ ## FAQ
6
+
7
+ ### How do I get the best results with oTToDev?
8
+
9
+ - **Be specific about your stack**: If you want to use specific frameworks or libraries (like Astro, Tailwind, ShadCN, or any other popular JavaScript framework), mention them in your initial prompt to ensure Bolt scaffolds the project accordingly.
10
+
11
+ - **Use the enhance prompt icon**: Before sending your prompt, try clicking the 'enhance' icon to have the AI model help you refine your prompt, then edit the results before submitting.
12
+
13
+ - **Scaffold the basics first, then add features**: Make sure the basic structure of your application is in place before diving into more advanced functionality. This helps oTToDev understand the foundation of your project and ensure everything is wired up right before building out more advanced functionality.
14
+
15
+ - **Batch simple instructions**: Save time by combining simple instructions into one message. For example, you can ask oTToDev to change the color scheme, add mobile responsiveness, and restart the dev server, all in one go, saving you time and reducing API credit consumption significantly.
16
+
17
+ ### Do you plan on merging oTToDev back into the official Bolt.new repo?
18
+
19
+ More news coming on this coming early next month - stay tuned!
20
+
21
+ ### Why are there so many open issues/pull requests?
22
+
23
+ oTToDev was started simply to showcase how to edit an open source project and to do something cool with local LLMs on my (@ColeMedin) YouTube channel! However, it quickly
24
+ grew into a massive community project that I am working hard to keep up with by forming a team of maintainers and getting as many people involved as I can.
25
+ That effort is going well and all of our maintainers are ABSOLUTE rockstars, but it still takes time to organize everything so we can efficiently get through all
26
+ the issues and PRs. But rest assured, we are working hard and even working on some partnerships behind the scenes to really help this project take off!
27
+
28
+ ### How do local LLMs fare compared to larger models like Claude 3.5 Sonnet for oTToDev/Bolt.new?
29
+
30
+ As much as the gap is quickly closing between open source and massive closed-source models, you’re still going to get the best results with the very large models like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b. This is one of the big tasks we have at hand - figuring out how to prompt better, use agents, and improve the platform as a whole to make it work better for even the smaller local LLMs!
31
+
32
+ ### I'm getting the error: "There was an error processing this request"
33
+
34
+ If you see this error within oTToDev, that is just the application telling you there is a problem at a high level, and this could mean a number of different things. To find the actual error, please check BOTH the terminal where you started the application (with Docker or pnpm) and the developer console in the browser. For most browsers, you can access the developer console by pressing F12 or right clicking anywhere in the browser and selecting “Inspect”. Then go to the “console” tab in the top right.
35
+
36
+ ### I'm getting the error: "x-api-key header missing"
37
+
38
+ We have seen this error a couple of times and for some reason just restarting the Docker container has fixed it. This seems to be Ollama specific. Another thing to try is running oTToDev with Docker or pnpm, whichever you didn’t run first. We are still on the hunt for why this happens once in a while!
39
+
40
+ ### I'm getting a blank preview when oTToDev runs my app!
41
+
42
+ We promise you that we are constantly testing new PRs coming into oTToDev and the preview is core functionality, so the application is not broken! When you get a blank preview or don’t get a preview, this is generally because the LLM hallucinated bad code or incorrect commands. We are working on making this more transparent so it is obvious. Sometimes the error will appear in developer console too so check that as well.
43
+
44
+ ### How to add a LLM:
45
+
46
+ To make new LLMs available to use in this version of Bolt.new, head on over to `app/utils/constants.ts` and find the constant MODEL_LIST. Each element in this array is an object that has the model ID for the name (get this from the provider's API documentation), a label for the frontend model dropdown, and the provider.
47
+
48
+ By default, Anthropic, OpenAI, Groq, and Ollama are implemented as providers, but the YouTube video for this repo covers how to extend this to work with more providers if you wish!
49
+
50
+ When you add a new model to the MODEL_LIST array, it will immediately be available to use when you run the app locally or reload it. For Ollama models, make sure you have the model installed already before trying to use it here!
51
+
52
+ ### Everything works but the results are bad
53
+
54
+ This goes to the point above about how local LLMs are getting very powerful but you still are going to see better (sometimes much better) results with the largest LLMs like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b. If you are using smaller LLMs like Qwen-2.5-Coder, consider it more experimental and educational at this point. It can build smaller applications really well, which is super impressive for a local LLM, but for larger scale applications you want to use the larger LLMs still!
README.md CHANGED
@@ -4,11 +4,11 @@
4
 
5
  This fork of Bolt.new (oTToDev) allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
6
 
7
- Join the community for oTToDev!
8
 
9
  https://thinktank.ottomator.ai
10
 
11
- # Requested Additions to this Fork - Feel Free to Contribute!!
12
 
13
  - ✅ OpenRouter Integration (@coleam00)
14
  - ✅ Gemini Integration (@jonathands)
@@ -31,25 +31,25 @@ https://thinktank.ottomator.ai
31
  - ✅ Ability to revert code to earlier version (@wonderwhy-er)
32
  - ✅ Cohere Integration (@hasanraiyan)
33
  - ✅ Dynamic model max token length (@hasanraiyan)
 
 
 
 
 
 
34
  - ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
35
  - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
36
- - ⬜ **HIGH PRIORITY** - Load local projects into the app
37
- - ⬜ **HIGH PRIORITY** - Attach images to prompts
38
  - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
39
- - ⬜ Mobile friendly
40
- - ⬜ Together Integration
41
  - ⬜ Azure Open AI API Integration
42
  - ⬜ Perplexity Integration
43
  - ⬜ Vertex AI Integration
44
  - ⬜ Deploy directly to Vercel/Netlify/other similar platforms
45
- - ⬜ Prompt caching
46
- - ⬜ Better prompt enhancing
47
  - ⬜ Have LLM plan the project in a MD file for better results/transparency
48
  - ⬜ VSCode Integration with git-like confirmations
49
  - ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
50
  - ⬜ Voice prompting
51
 
52
- # Bolt.new: AI-Powered Full-Stack Web Development in the Browser
53
 
54
  Bolt.new is an AI-powered web development agent that allows you to prompt, run, edit, and deploy full-stack applications directly from your browser—no local setup required. If you're here to build your own AI-powered web dev agent using the Bolt open source codebase, [click here to get started!](./CONTRIBUTING.md)
55
 
@@ -124,6 +124,13 @@ Optionally, you can set the debug level:
124
  VITE_LOG_LEVEL=debug
125
  ```
126
 
 
 
 
 
 
 
 
127
  **Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
128
 
129
  ## Run with Docker
@@ -191,40 +198,6 @@ sudo npm install -g pnpm
191
  ```bash
192
  pnpm run dev
193
  ```
194
-
195
- ## Super Important Note on Running Ollama Models
196
-
197
- Ollama models by default only have 2048 tokens for their context window. Even for large models that can easily handle way more.
198
- This is not a large enough window to handle the Bolt.new/oTToDev prompt! You have to create a version of any model you want
199
- to use where you specify a larger context window. Luckily it's super easy to do that.
200
-
201
- All you have to do is:
202
-
203
- - Create a file called "Modelfile" (no file extension) anywhere on your computer
204
- - Put in the two lines:
205
-
206
- ```
207
- FROM [Ollama model ID such as qwen2.5-coder:7b]
208
- PARAMETER num_ctx 32768
209
- ```
210
-
211
- - Run the command:
212
-
213
- ```
214
- ollama create -f Modelfile [your new model ID, can be whatever you want (example: qwen2.5-coder-extra-ctx:7b)]
215
- ```
216
-
217
- Now you have a new Ollama model that isn't heavily limited in the context length like Ollama models are by default for some reason.
218
- You'll see this new model in the list of Ollama models along with all the others you pulled!
219
-
220
- ## Adding New LLMs:
221
-
222
- To make new LLMs available to use in this version of Bolt.new, head on over to `app/utils/constants.ts` and find the constant MODEL_LIST. Each element in this array is an object that has the model ID for the name (get this from the provider's API documentation), a label for the frontend model dropdown, and the provider.
223
-
224
- By default, Anthropic, OpenAI, Groq, and Ollama are implemented as providers, but the YouTube video for this repo covers how to extend this to work with more providers if you wish!
225
-
226
- When you add a new model to the MODEL_LIST array, it will immediately be available to use when you run the app locally or reload it. For Ollama models, make sure you have the model installed already before trying to use it here!
227
-
228
  ## Available Scripts
229
 
230
  - `pnpm run dev`: Starts the development server.
@@ -235,6 +208,7 @@ When you add a new model to the MODEL_LIST array, it will immediately be availab
235
  - `pnpm run typecheck`: Runs TypeScript type checking.
236
  - `pnpm run typegen`: Generates TypeScript types using Wrangler.
237
  - `pnpm run deploy`: Builds the project and deploys it to Cloudflare Pages.
 
238
 
239
  ## Development
240
 
@@ -246,55 +220,16 @@ pnpm run dev
246
 
247
  This will start the Remix Vite development server. You will need Google Chrome Canary to run this locally if you use Chrome! It's an easy install and a good browser for web development anyway.
248
 
249
- ## FAQ
250
-
251
- ### How do I get the best results with oTToDev?
252
-
253
- - **Be specific about your stack**: If you want to use specific frameworks or libraries (like Astro, Tailwind, ShadCN, or any other popular JavaScript framework), mention them in your initial prompt to ensure Bolt scaffolds the project accordingly.
254
-
255
- - **Use the enhance prompt icon**: Before sending your prompt, try clicking the 'enhance' icon to have the AI model help you refine your prompt, then edit the results before submitting.
256
-
257
- - **Scaffold the basics first, then add features**: Make sure the basic structure of your application is in place before diving into more advanced functionality. This helps oTToDev understand the foundation of your project and ensure everything is wired up right before building out more advanced functionality.
258
-
259
- - **Batch simple instructions**: Save time by combining simple instructions into one message. For example, you can ask oTToDev to change the color scheme, add mobile responsiveness, and restart the dev server, all in one go saving you time and reducing API credit consumption significantly.
260
-
261
- ### How do I contribute to oTToDev?
262
 
263
  [Please check out our dedicated page for contributing to oTToDev here!](CONTRIBUTING.md)
264
 
265
- ### Do you plan on merging oTToDev back into the official Bolt.new repo?
266
-
267
- More news coming on this coming early next month - stay tuned!
268
-
269
- ### What are the future plans for oTToDev?
270
 
271
  [Check out our Roadmap here!](https://roadmap.sh/r/ottodev-roadmap-2ovzo)
272
 
273
  Lot more updates to this roadmap coming soon!
274
 
275
- ### Why are there so many open issues/pull requests?
276
-
277
- oTToDev was started simply to showcase how to edit an open source project and to do something cool with local LLMs on my (@ColeMedin) YouTube channel! However, it quickly
278
- grew into a massive community project that I am working hard to keep up with the demand of by forming a team of maintainers and getting as many people involved as I can.
279
- That effort is going well and all of our maintainers are ABSOLUTE rockstars, but it still takes time to organize everything so we can efficiently get through all
280
- the issues and PRs. But rest assured, we are working hard and even working on some partnerships behind the scenes to really help this project take off!
281
-
282
- ### How do local LLMs fair compared to larger models like Claude 3.5 Sonnet for oTToDev/Bolt.new?
283
-
284
- As much as the gap is quickly closing between open source and massive close source models, you’re still going to get the best results with the very large models like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b. This is one of the big tasks we have at hand - figuring out how to prompt better, use agents, and improve the platform as a whole to make it work better for even the smaller local LLMs!
285
-
286
- ### I'm getting the error: "There was an error processing this request"
287
-
288
- If you see this error within oTToDev, that is just the application telling you there is a problem at a high level, and this could mean a number of different things. To find the actual error, please check BOTH the terminal where you started the application (with Docker or pnpm) and the developer console in the browser. For most browsers, you can access the developer console by pressing F12 or right clicking anywhere in the browser and selecting “Inspect”. Then go to the “console” tab in the top right.
289
-
290
- ### I'm getting the error: "x-api-key header missing"
291
-
292
- We have seen this error a couple times and for some reason just restarting the Docker container has fixed it. This seems to be Ollama specific. Another thing to try is try to run oTToDev with Docker or pnpm, whichever you didn’t run first. We are still on the hunt for why this happens once and a while!
293
-
294
- ### I'm getting a blank preview when oTToDev runs my app!
295
-
296
- We promise you that we are constantly testing new PRs coming into oTToDev and the preview is core functionality, so the application is not broken! When you get a blank preview or don’t get a preview, this is generally because the LLM hallucinated bad code or incorrect commands. We are working on making this more transparent so it is obvious. Sometimes the error will appear in developer console too so check that as well.
297
-
298
- ### Everything works but the results are bad
299
 
300
- This goes to the point above about how local LLMs are getting very powerful but you still are going to see better (sometimes much better) results with the largest LLMs like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b. If you are using smaller LLMs like Qwen-2.5-Coder, consider it more experimental and educational at this point. It can build smaller applications really well, which is super impressive for a local LLM, but for larger scale applications you want to use the larger LLMs still!
 
4
 
5
  This fork of Bolt.new (oTToDev) allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
6
 
7
+ ## Join the community for oTToDev!
8
 
9
  https://thinktank.ottomator.ai
10
 
11
+ ## Requested Additions - Feel Free to Contribute!
12
 
13
  - ✅ OpenRouter Integration (@coleam00)
14
  - ✅ Gemini Integration (@jonathands)
 
31
  - ✅ Ability to revert code to earlier version (@wonderwhy-er)
32
  - ✅ Cohere Integration (@hasanraiyan)
33
  - ✅ Dynamic model max token length (@hasanraiyan)
34
+ - ✅ Prompt caching (@SujalXplores)
35
+ - ✅ Load local projects into the app (@wonderwhy-er)
36
+ - ✅ Together Integration (@mouimet-infinisoft)
37
+ - ✅ Mobile friendly (@qwikode)
38
+ - ✅ Better prompt enhancing (@SujalXplores)
39
+ - ⬜ **HIGH PRIORITY** - ALMOST DONE - Attach images to prompts (@atrokhym)
40
  - ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
41
  - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
 
 
42
  - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
 
 
43
  - ⬜ Azure Open AI API Integration
44
  - ⬜ Perplexity Integration
45
  - ⬜ Vertex AI Integration
46
  - ⬜ Deploy directly to Vercel/Netlify/other similar platforms
 
 
47
  - ⬜ Have LLM plan the project in a MD file for better results/transparency
48
  - ⬜ VSCode Integration with git-like confirmations
49
  - ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
50
  - ⬜ Voice prompting
51
 
52
+ ## Bolt.new: AI-Powered Full-Stack Web Development in the Browser
53
 
54
  Bolt.new is an AI-powered web development agent that allows you to prompt, run, edit, and deploy full-stack applications directly from your browser—no local setup required. If you're here to build your own AI-powered web dev agent using the Bolt open source codebase, [click here to get started!](./CONTRIBUTING.md)
55
 
 
124
  VITE_LOG_LEVEL=debug
125
  ```
126
 
127
+ And if using Ollama, set the DEFAULT_NUM_CTX. The example below uses an 8K context with Ollama running on localhost port 11434:
128
+
129
+ ```
130
+ OLLAMA_API_BASE_URL=http://localhost:11434
131
+ DEFAULT_NUM_CTX=8192
132
+ ```
133
+
134
  **Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
135
 
136
  ## Run with Docker
 
198
  ```bash
199
  pnpm run dev
200
  ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
201
  ## Available Scripts
202
 
203
  - `pnpm run dev`: Starts the development server.
 
208
  - `pnpm run typecheck`: Runs TypeScript type checking.
209
  - `pnpm run typegen`: Generates TypeScript types using Wrangler.
210
  - `pnpm run deploy`: Builds the project and deploys it to Cloudflare Pages.
211
+ - `pnpm run lint:fix`: Runs the linter and automatically fixes issues according to your ESLint configuration.
212
 
213
  ## Development
214
 
 
220
 
221
  This will start the Remix Vite development server. You will need Google Chrome Canary to run this locally if you use Chrome! It's an easy install and a good browser for web development anyway.
222
 
223
+ ## How do I contribute to oTToDev?
 
 
 
 
 
 
 
 
 
 
 
 
224
 
225
  [Please check out our dedicated page for contributing to oTToDev here!](CONTRIBUTING.md)
226
 
227
+ ## What are the future plans for oTToDev?
 
 
 
 
228
 
229
  [Check out our Roadmap here!](https://roadmap.sh/r/ottodev-roadmap-2ovzo)
230
 
231
  Lot more updates to this roadmap coming soon!
232
 
233
+ ## FAQ
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
234
 
235
+ [Please check out our dedicated page for FAQ's related to oTToDev here!](FAQ.md)
app/components/chat/BaseChat.module.scss CHANGED
@@ -17,3 +17,107 @@
17
  .Chat {
18
  opacity: 1;
19
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  .Chat {
18
  opacity: 1;
19
  }
20
+
21
+ .RayContainer {
22
+ --gradient-opacity: 0.85;
23
+ --ray-gradient: radial-gradient(rgba(83, 196, 255, var(--gradient-opacity)) 0%, rgba(43, 166, 255, 0) 100%);
24
+ transition: opacity 0.25s linear;
25
+ position: fixed;
26
+ inset: 0;
27
+ pointer-events: none;
28
+ user-select: none;
29
+ }
30
+
31
+ .LightRayOne {
32
+ width: 480px;
33
+ height: 680px;
34
+ transform: rotate(80deg);
35
+ top: -540px;
36
+ left: 250px;
37
+ filter: blur(110px);
38
+ position: absolute;
39
+ border-radius: 100%;
40
+ background: var(--ray-gradient);
41
+ }
42
+
43
+ .LightRayTwo {
44
+ width: 110px;
45
+ height: 400px;
46
+ transform: rotate(-20deg);
47
+ top: -280px;
48
+ left: 350px;
49
+ mix-blend-mode: overlay;
50
+ opacity: 0.6;
51
+ filter: blur(60px);
52
+ position: absolute;
53
+ border-radius: 100%;
54
+ background: var(--ray-gradient);
55
+ }
56
+
57
+ .LightRayThree {
58
+ width: 400px;
59
+ height: 370px;
60
+ top: -350px;
61
+ left: 200px;
62
+ mix-blend-mode: overlay;
63
+ opacity: 0.6;
64
+ filter: blur(21px);
65
+ position: absolute;
66
+ border-radius: 100%;
67
+ background: var(--ray-gradient);
68
+ }
69
+
70
+ .LightRayFour {
71
+ position: absolute;
72
+ width: 330px;
73
+ height: 370px;
74
+ top: -330px;
75
+ left: 50px;
76
+ mix-blend-mode: overlay;
77
+ opacity: 0.5;
78
+ filter: blur(21px);
79
+ border-radius: 100%;
80
+ background: var(--ray-gradient);
81
+ }
82
+
83
+ .LightRayFive {
84
+ position: absolute;
85
+ width: 110px;
86
+ height: 400px;
87
+ transform: rotate(-40deg);
88
+ top: -280px;
89
+ left: -10px;
90
+ mix-blend-mode: overlay;
91
+ opacity: 0.8;
92
+ filter: blur(60px);
93
+ border-radius: 100%;
94
+ background: var(--ray-gradient);
95
+ }
96
+
97
+ .PromptEffectContainer {
98
+ --prompt-container-offset: 50px;
99
+ --prompt-line-stroke-width: 1px;
100
+ position: absolute;
101
+ pointer-events: none;
102
+ inset: calc(var(--prompt-container-offset) / -2);
103
+ width: calc(100% + var(--prompt-container-offset));
104
+ height: calc(100% + var(--prompt-container-offset));
105
+ }
106
+
107
+ .PromptEffectLine {
108
+ width: calc(100% - var(--prompt-container-offset) + var(--prompt-line-stroke-width));
109
+ height: calc(100% - var(--prompt-container-offset) + var(--prompt-line-stroke-width));
110
+ x: calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2);
111
+ y: calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2);
112
+ rx: calc(8px - var(--prompt-line-stroke-width));
113
+ fill: transparent;
114
+ stroke-width: var(--prompt-line-stroke-width);
115
+ stroke: url(#line-gradient);
116
+ stroke-dasharray: 35px 65px;
117
+ stroke-dashoffset: 10;
118
+ }
119
+
120
+ .PromptShine {
121
+ fill: url(#shine-gradient);
122
+ mix-blend-mode: overlay;
123
+ }
app/components/chat/BaseChat.tsx CHANGED
@@ -47,7 +47,7 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
47
  key={provider?.name}
48
  value={model}
49
  onChange={(e) => setModel(e.target.value)}
50
- className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%] "
51
  >
52
  {[...modelList]
53
  .filter((e) => e.provider == provider?.name && e.name)
@@ -116,6 +116,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
116
  const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
117
  const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
118
  const [modelList, setModelList] = useState(MODEL_LIST);
 
119
 
120
  useEffect(() => {
121
  // Load API keys from cookies on component mount
@@ -167,6 +168,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
167
  )}
168
  data-chat-visible={showChat}
169
  >
 
 
 
 
 
 
 
170
  <ClientOnly>{() => <Menu />}</ClientOnly>
171
  <div ref={scrollRef} className="flex flex-col lg:flex-row overflow-y-auto w-full h-full">
172
  <div className={classNames(styles.Chat, 'flex flex-col flex-grow lg:min-w-[var(--chat-min-width)] h-full')}>
@@ -199,39 +207,85 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
199
  </ClientOnly>
200
  <div
201
  className={classNames(
202
- ' bg-bolt-elements-background-depth-2 p-3 rounded-lg border border-bolt-elements-borderColor relative w-full max-w-chat mx-auto z-prompt mb-6',
203
  {
204
  'sticky bottom-2': chatStarted,
205
  },
206
  )}
207
  >
208
- <ModelSelector
209
- key={provider?.name + ':' + modelList.length}
210
- model={model}
211
- setModel={setModel}
212
- modelList={modelList}
213
- provider={provider}
214
- setProvider={setProvider}
215
- providerList={PROVIDER_LIST}
216
- apiKeys={apiKeys}
217
- />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
219
- {provider && (
220
- <APIKeyManager
221
- provider={provider}
222
- apiKey={apiKeys[provider.name] || ''}
223
- setApiKey={(key) => updateApiKey(provider.name, key)}
224
- />
225
- )}
 
 
 
 
 
 
 
 
 
 
 
 
 
226
 
227
  <div
228
  className={classNames(
229
- 'shadow-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background backdrop-filter backdrop-blur-[8px] rounded-lg overflow-hidden transition-all',
230
  )}
231
  >
232
  <textarea
233
  ref={textareaRef}
234
- className={`w-full pl-4 pt-4 pr-16 focus:outline-none focus:ring-0 focus:border-none focus:shadow-none resize-none text-md text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent transition-all`}
 
 
235
  onKeyDown={(event) => {
236
  if (event.key === 'Enter') {
237
  if (event.shiftKey) {
@@ -299,8 +353,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
299
  {input.length > 3 ? (
300
  <div className="text-xs text-bolt-elements-textTertiary">
301
  Use <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Shift</kbd> +{' '}
302
- <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Return</kbd> for
303
- a new line
304
  </div>
305
  ) : null}
306
  </div>
 
47
  key={provider?.name}
48
  value={model}
49
  onChange={(e) => setModel(e.target.value)}
50
+ className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
51
  >
52
  {[...modelList]
53
  .filter((e) => e.provider == provider?.name && e.name)
 
116
  const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
117
  const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
118
  const [modelList, setModelList] = useState(MODEL_LIST);
119
+ const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
120
 
121
  useEffect(() => {
122
  // Load API keys from cookies on component mount
 
168
  )}
169
  data-chat-visible={showChat}
170
  >
171
+ <div className={classNames(styles.RayContainer)}>
172
+ <div className={classNames(styles.LightRayOne)}></div>
173
+ <div className={classNames(styles.LightRayTwo)}></div>
174
+ <div className={classNames(styles.LightRayThree)}></div>
175
+ <div className={classNames(styles.LightRayFour)}></div>
176
+ <div className={classNames(styles.LightRayFive)}></div>
177
+ </div>
178
  <ClientOnly>{() => <Menu />}</ClientOnly>
179
  <div ref={scrollRef} className="flex flex-col lg:flex-row overflow-y-auto w-full h-full">
180
  <div className={classNames(styles.Chat, 'flex flex-col flex-grow lg:min-w-[var(--chat-min-width)] h-full')}>
 
207
  </ClientOnly>
208
  <div
209
  className={classNames(
210
+ 'bg-bolt-elements-background-depth-2 p-3 rounded-lg border border-bolt-elements-borderColor relative w-full max-w-chat mx-auto z-prompt mb-6',
211
  {
212
  'sticky bottom-2': chatStarted,
213
  },
214
  )}
215
  >
216
+ <svg className={classNames(styles.PromptEffectContainer)}>
217
+ <defs>
218
+ <linearGradient
219
+ id="line-gradient"
220
+ x1="20%"
221
+ y1="0%"
222
+ x2="-14%"
223
+ y2="10%"
224
+ gradientUnits="userSpaceOnUse"
225
+ gradientTransform="rotate(-45)"
226
+ >
227
+ <stop offset="0%" stopColor="#1488fc" stopOpacity="0%"></stop>
228
+ <stop offset="40%" stopColor="#1488fc" stopOpacity="80%"></stop>
229
+ <stop offset="50%" stopColor="#1488fc" stopOpacity="80%"></stop>
230
+ <stop offset="100%" stopColor="#1488fc" stopOpacity="0%"></stop>
231
+ </linearGradient>
232
+ <linearGradient id="shine-gradient">
233
+ <stop offset="0%" stopColor="white" stopOpacity="0%"></stop>
234
+ <stop offset="40%" stopColor="#8adaff" stopOpacity="80%"></stop>
235
+ <stop offset="50%" stopColor="#8adaff" stopOpacity="80%"></stop>
236
+ <stop offset="100%" stopColor="white" stopOpacity="0%"></stop>
237
+ </linearGradient>
238
+ </defs>
239
+ <rect className={classNames(styles.PromptEffectLine)} pathLength="100" strokeLinecap="round"></rect>
240
+ <rect className={classNames(styles.PromptShine)} x="48" y="24" width="70" height="1"></rect>
241
+ </svg>
242
+ <div>
243
+ <div className="flex justify-between items-center mb-2">
244
+ <button
245
+ onClick={() => setIsModelSettingsCollapsed(!isModelSettingsCollapsed)}
246
+ className={classNames('flex items-center gap-2 p-2 rounded-lg transition-all', {
247
+ 'bg-bolt-elements-item-backgroundAccent text-bolt-elements-item-contentAccent':
248
+ isModelSettingsCollapsed,
249
+ 'bg-bolt-elements-item-backgroundDefault text-bolt-elements-item-contentDefault':
250
+ !isModelSettingsCollapsed,
251
+ })}
252
+ >
253
+ <div className={`i-ph:caret-${isModelSettingsCollapsed ? 'right' : 'down'} text-lg`} />
254
+ <span>Model Settings</span>
255
+ </button>
256
+ </div>
257
 
258
+ <div className={isModelSettingsCollapsed ? 'hidden' : ''}>
259
+ <ModelSelector
260
+ key={provider?.name + ':' + modelList.length}
261
+ model={model}
262
+ setModel={setModel}
263
+ modelList={modelList}
264
+ provider={provider}
265
+ setProvider={setProvider}
266
+ providerList={PROVIDER_LIST}
267
+ apiKeys={apiKeys}
268
+ />
269
+ {provider && (
270
+ <APIKeyManager
271
+ provider={provider}
272
+ apiKey={apiKeys[provider.name] || ''}
273
+ setApiKey={(key) => updateApiKey(provider.name, key)}
274
+ />
275
+ )}
276
+ </div>
277
+ </div>
278
 
279
  <div
280
  className={classNames(
281
+ 'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
282
  )}
283
  >
284
  <textarea
285
  ref={textareaRef}
286
+ className={
287
+ 'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm'
288
+ }
289
  onKeyDown={(event) => {
290
  if (event.key === 'Enter') {
291
  if (event.shiftKey) {
 
353
  {input.length > 3 ? (
354
  <div className="text-xs text-bolt-elements-textTertiary">
355
  Use <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Shift</kbd> +{' '}
356
+ <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Return</kbd> a
357
+ new line
358
  </div>
359
  ) : null}
360
  </div>
app/components/chat/Chat.client.tsx CHANGED
@@ -6,19 +6,20 @@ import { useStore } from '@nanostores/react';
6
  import type { Message } from 'ai';
7
  import { useChat } from 'ai/react';
8
  import { useAnimate } from 'framer-motion';
9
- import { memo, useEffect, useRef, useState } from 'react';
10
  import { cssTransition, toast, ToastContainer } from 'react-toastify';
11
  import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from '~/lib/hooks';
12
  import { description, useChatHistory } from '~/lib/persistence';
13
  import { chatStore } from '~/lib/stores/chat';
14
  import { workbenchStore } from '~/lib/stores/workbench';
15
  import { fileModificationsToHTML } from '~/utils/diff';
16
- import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROVIDER_LIST } from '~/utils/constants';
17
  import { cubicEasingFn } from '~/utils/easings';
18
  import { createScopedLogger, renderLogger } from '~/utils/logger';
19
  import { BaseChat } from './BaseChat';
20
  import Cookies from 'js-cookie';
21
  import type { ProviderInfo } from '~/utils/types';
 
22
 
23
  const toastAnimation = cssTransition({
24
  enter: 'animated fadeInRight',
@@ -120,6 +121,7 @@ export const ChatImpl = memo(
120
  logger.debug('Finished streaming');
121
  },
122
  initialMessages,
 
123
  });
124
 
125
  const { enhancingPrompt, promptEnhanced, enhancePrompt, resetEnhancer } = usePromptEnhancer();
@@ -225,12 +227,33 @@ export const ChatImpl = memo(
225
  }
226
 
227
  setInput('');
 
228
 
229
  resetEnhancer();
230
 
231
  textareaRef.current?.blur();
232
  };
233
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
234
  const [messageRef, scrollRef] = useSnapScroll();
235
 
236
  useEffect(() => {
@@ -268,7 +291,10 @@ export const ChatImpl = memo(
268
  setProvider={handleProviderChange}
269
  messageRef={messageRef}
270
  scrollRef={scrollRef}
271
- handleInputChange={handleInputChange}
 
 
 
272
  handleStop={abort}
273
  description={description}
274
  importChat={importChat}
 
6
  import type { Message } from 'ai';
7
  import { useChat } from 'ai/react';
8
  import { useAnimate } from 'framer-motion';
9
+ import { memo, useCallback, useEffect, useRef, useState } from 'react';
10
  import { cssTransition, toast, ToastContainer } from 'react-toastify';
11
  import { useMessageParser, usePromptEnhancer, useShortcuts, useSnapScroll } from '~/lib/hooks';
12
  import { description, useChatHistory } from '~/lib/persistence';
13
  import { chatStore } from '~/lib/stores/chat';
14
  import { workbenchStore } from '~/lib/stores/workbench';
15
  import { fileModificationsToHTML } from '~/utils/diff';
16
+ import { DEFAULT_MODEL, DEFAULT_PROVIDER, PROMPT_COOKIE_KEY, PROVIDER_LIST } from '~/utils/constants';
17
  import { cubicEasingFn } from '~/utils/easings';
18
  import { createScopedLogger, renderLogger } from '~/utils/logger';
19
  import { BaseChat } from './BaseChat';
20
  import Cookies from 'js-cookie';
21
  import type { ProviderInfo } from '~/utils/types';
22
+ import { debounce } from '~/utils/debounce';
23
 
24
  const toastAnimation = cssTransition({
25
  enter: 'animated fadeInRight',
 
121
  logger.debug('Finished streaming');
122
  },
123
  initialMessages,
124
+ initialInput: Cookies.get(PROMPT_COOKIE_KEY) || '',
125
  });
126
 
127
  const { enhancingPrompt, promptEnhanced, enhancePrompt, resetEnhancer } = usePromptEnhancer();
 
227
  }
228
 
229
  setInput('');
230
+ Cookies.remove(PROMPT_COOKIE_KEY);
231
 
232
  resetEnhancer();
233
 
234
  textareaRef.current?.blur();
235
  };
236
 
237
+ /**
238
+ * Handles the change event for the textarea and updates the input state.
239
+ * @param event - The change event from the textarea.
240
+ */
241
+ const onTextareaChange = (event: React.ChangeEvent<HTMLTextAreaElement>) => {
242
+ handleInputChange(event);
243
+ };
244
+
245
+ /**
246
+ * Debounced function to cache the prompt in cookies.
247
+ * Caches the trimmed value of the textarea input after a delay to optimize performance.
248
+ */
249
+ const debouncedCachePrompt = useCallback(
250
+ debounce((event: React.ChangeEvent<HTMLTextAreaElement>) => {
251
+ const trimmedValue = event.target.value.trim();
252
+ Cookies.set(PROMPT_COOKIE_KEY, trimmedValue, { expires: 30 });
253
+ }, 1000),
254
+ [],
255
+ );
256
+
257
  const [messageRef, scrollRef] = useSnapScroll();
258
 
259
  useEffect(() => {
 
291
  setProvider={handleProviderChange}
292
  messageRef={messageRef}
293
  scrollRef={scrollRef}
294
+ handleInputChange={(e) => {
295
+ onTextareaChange(e);
296
+ debouncedCachePrompt(e);
297
+ }}
298
  handleStop={abort}
299
  description={description}
300
  importChat={importChat}
app/components/chat/ExamplePrompts.tsx CHANGED
@@ -5,13 +5,18 @@ const EXAMPLE_PROMPTS = [
5
  { text: 'Build a simple blog using Astro' },
6
  { text: 'Create a cookie consent form using Material UI' },
7
  { text: 'Make a space invaders game' },
8
- { text: 'How do I center a div?' },
9
  ];
10
 
11
  export function ExamplePrompts(sendMessage?: { (event: React.UIEvent, messageInput?: string): void | undefined }) {
12
  return (
13
- <div id="examples" className="relative w-full max-w-xl mx-auto mt-8 flex justify-center">
14
- <div className="flex flex-col space-y-2 [mask-image:linear-gradient(to_bottom,black_0%,transparent_180%)] hover:[mask-image:none]">
 
 
 
 
 
15
  {EXAMPLE_PROMPTS.map((examplePrompt, index: number) => {
16
  return (
17
  <button
@@ -19,10 +24,9 @@ export function ExamplePrompts(sendMessage?: { (event: React.UIEvent, messageInp
19
  onClick={(event) => {
20
  sendMessage?.(event, examplePrompt.text);
21
  }}
22
- className="group flex items-center w-full gap-2 justify-center bg-transparent text-bolt-elements-textTertiary hover:text-bolt-elements-textPrimary transition-theme"
23
  >
24
  {examplePrompt.text}
25
- <div className="i-ph:arrow-bend-down-left" />
26
  </button>
27
  );
28
  })}
 
5
  { text: 'Build a simple blog using Astro' },
6
  { text: 'Create a cookie consent form using Material UI' },
7
  { text: 'Make a space invaders game' },
8
+ { text: 'Make a Tic Tac Toe game in html, css and js only' },
9
  ];
10
 
11
  export function ExamplePrompts(sendMessage?: { (event: React.UIEvent, messageInput?: string): void | undefined }) {
12
  return (
13
+ <div id="examples" className="relative flex flex-col gap-9 w-full max-w-3xl mx-auto flex justify-center mt-6">
14
+ <div
15
+ className="flex flex-wrap justify-center gap-2"
16
+ style={{
17
+ animation: '.25s ease-out 0s 1 _fade-and-move-in_g2ptj_1 forwards',
18
+ }}
19
+ >
20
  {EXAMPLE_PROMPTS.map((examplePrompt, index: number) => {
21
  return (
22
  <button
 
24
  onClick={(event) => {
25
  sendMessage?.(event, examplePrompt.text);
26
  }}
27
+ className="border border-bolt-elements-borderColor rounded-full bg-gray-50 hover:bg-gray-100 dark:bg-gray-950 dark:hover:bg-gray-900 text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary px-3 py-1 text-xs transition-theme"
28
  >
29
  {examplePrompt.text}
 
30
  </button>
31
  );
32
  })}
app/components/chat/ImportFolderButton.tsx CHANGED
@@ -156,7 +156,7 @@ ${fileArtifacts.join('\n\n')}
156
  }}
157
  className={className}
158
  >
159
- <div className="i-ph:folder-simple-upload" />
160
  Import Folder
161
  </button>
162
  </>
 
156
  }}
157
  className={className}
158
  >
159
+ <div className="i-ph:upload-simple" />
160
  Import Folder
161
  </button>
162
  </>
app/components/chat/Markdown.spec.ts ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { describe, expect, it } from 'vitest';
2
+ import { stripCodeFenceFromArtifact } from './Markdown';
3
+
4
+ describe('stripCodeFenceFromArtifact', () => {
5
+ it('should remove code fences around artifact element', () => {
6
+ const input = "```xml\n<div class='__boltArtifact__'></div>\n```";
7
+ const expected = "\n<div class='__boltArtifact__'></div>\n";
8
+ expect(stripCodeFenceFromArtifact(input)).toBe(expected);
9
+ });
10
+
11
+ it('should handle code fence with language specification', () => {
12
+ const input = "```typescript\n<div class='__boltArtifact__'></div>\n```";
13
+ const expected = "\n<div class='__boltArtifact__'></div>\n";
14
+ expect(stripCodeFenceFromArtifact(input)).toBe(expected);
15
+ });
16
+
17
+ it('should not modify content without artifacts', () => {
18
+ const input = '```\nregular code block\n```';
19
+ expect(stripCodeFenceFromArtifact(input)).toBe(input);
20
+ });
21
+
22
+ it('should handle empty input', () => {
23
+ expect(stripCodeFenceFromArtifact('')).toBe('');
24
+ });
25
+
26
+ it('should handle artifact without code fences', () => {
27
+ const input = "<div class='__boltArtifact__'></div>";
28
+ expect(stripCodeFenceFromArtifact(input)).toBe(input);
29
+ });
30
+
31
+ it('should handle multiple artifacts but only remove fences around them', () => {
32
+ const input = [
33
+ 'Some text',
34
+ '```typescript',
35
+ "<div class='__boltArtifact__'></div>",
36
+ '```',
37
+ '```',
38
+ 'regular code',
39
+ '```',
40
+ ].join('\n');
41
+
42
+ const expected = ['Some text', '', "<div class='__boltArtifact__'></div>", '', '```', 'regular code', '```'].join(
43
+ '\n',
44
+ );
45
+
46
+ expect(stripCodeFenceFromArtifact(input)).toBe(expected);
47
+ });
48
+ });
app/components/chat/Markdown.tsx CHANGED
@@ -68,7 +68,51 @@ export const Markdown = memo(({ children, html = false, limitedMarkdown = false
68
  remarkPlugins={remarkPlugins(limitedMarkdown)}
69
  rehypePlugins={rehypePlugins(html)}
70
  >
71
- {children}
72
  </ReactMarkdown>
73
  );
74
  });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
  remarkPlugins={remarkPlugins(limitedMarkdown)}
69
  rehypePlugins={rehypePlugins(html)}
70
  >
71
+ {stripCodeFenceFromArtifact(children)}
72
  </ReactMarkdown>
73
  );
74
  });
75
+
76
+ /**
77
+ * Removes code fence markers (```) surrounding an artifact element while preserving the artifact content.
78
+ * This is necessary because artifacts should not be wrapped in code blocks when rendered for rendering action list.
79
+ *
80
+ * @param content - The markdown content to process
81
+ * @returns The processed content with code fence markers removed around artifacts
82
+ *
83
+ * @example
84
+ * // Removes code fences around artifact
85
+ * const input = "```xml\n<div class='__boltArtifact__'></div>\n```";
86
+ * stripCodeFenceFromArtifact(input);
87
+ * // Returns: "\n<div class='__boltArtifact__'></div>\n"
88
+ *
89
+ * @remarks
90
+ * - Only removes code fences that directly wrap an artifact (marked with __boltArtifact__ class)
91
+ * - Handles code fences with optional language specifications (e.g. ```xml, ```typescript)
92
+ * - Preserves original content if no artifact is found
93
+ * - Safely handles edge cases like empty input or artifacts at start/end of content
94
+ */
95
+ export const stripCodeFenceFromArtifact = (content: string) => {
96
+ if (!content || !content.includes('__boltArtifact__')) {
97
+ return content;
98
+ }
99
+
100
+ const lines = content.split('\n');
101
+ const artifactLineIndex = lines.findIndex((line) => line.includes('__boltArtifact__'));
102
+
103
+ // Return original content if artifact line not found
104
+ if (artifactLineIndex === -1) {
105
+ return content;
106
+ }
107
+
108
+ // Check previous line for code fence
109
+ if (artifactLineIndex > 0 && lines[artifactLineIndex - 1]?.trim().match(/^```\w*$/)) {
110
+ lines[artifactLineIndex - 1] = '';
111
+ }
112
+
113
+ if (artifactLineIndex < lines.length - 1 && lines[artifactLineIndex + 1]?.trim().match(/^```$/)) {
114
+ lines[artifactLineIndex + 1] = '';
115
+ }
116
+
117
+ return lines.join('\n');
118
+ };
app/components/workbench/EditorPanel.tsx CHANGED
@@ -1,6 +1,6 @@
1
  import { useStore } from '@nanostores/react';
2
- import { memo, useEffect, useMemo, useRef, useState } from 'react';
3
- import { Panel, PanelGroup, PanelResizeHandle, type ImperativePanelHandle } from 'react-resizable-panels';
4
  import {
5
  CodeMirrorEditor,
6
  type EditorDocument,
@@ -9,21 +9,17 @@ import {
9
  type OnSaveCallback as OnEditorSave,
10
  type OnScrollCallback as OnEditorScroll,
11
  } from '~/components/editor/codemirror/CodeMirrorEditor';
12
- import { IconButton } from '~/components/ui/IconButton';
13
  import { PanelHeader } from '~/components/ui/PanelHeader';
14
  import { PanelHeaderButton } from '~/components/ui/PanelHeaderButton';
15
- import { shortcutEventEmitter } from '~/lib/hooks';
16
  import type { FileMap } from '~/lib/stores/files';
17
  import { themeStore } from '~/lib/stores/theme';
18
- import { workbenchStore } from '~/lib/stores/workbench';
19
- import { classNames } from '~/utils/classNames';
20
  import { WORK_DIR } from '~/utils/constants';
21
- import { logger, renderLogger } from '~/utils/logger';
22
  import { isMobile } from '~/utils/mobile';
23
  import { FileBreadcrumb } from './FileBreadcrumb';
24
  import { FileTree } from './FileTree';
25
- import { Terminal, type TerminalRef } from './terminal/Terminal';
26
- import React from 'react';
27
 
28
  interface EditorPanelProps {
29
  files?: FileMap;
@@ -38,8 +34,6 @@ interface EditorPanelProps {
38
  onFileReset?: () => void;
39
  }
40
 
41
- const MAX_TERMINALS = 3;
42
- const DEFAULT_TERMINAL_SIZE = 25;
43
  const DEFAULT_EDITOR_SIZE = 100 - DEFAULT_TERMINAL_SIZE;
44
 
45
  const editorSettings: EditorSettings = { tabSize: 2 };
@@ -62,13 +56,6 @@ export const EditorPanel = memo(
62
  const theme = useStore(themeStore);
63
  const showTerminal = useStore(workbenchStore.showTerminal);
64
 
65
- const terminalRefs = useRef<Array<TerminalRef | null>>([]);
66
- const terminalPanelRef = useRef<ImperativePanelHandle>(null);
67
- const terminalToggledByShortcut = useRef(false);
68
-
69
- const [activeTerminal, setActiveTerminal] = useState(0);
70
- const [terminalCount, setTerminalCount] = useState(1);
71
-
72
  const activeFileSegments = useMemo(() => {
73
  if (!editorDocument) {
74
  return undefined;
@@ -81,48 +68,6 @@ export const EditorPanel = memo(
81
  return editorDocument !== undefined && unsavedFiles?.has(editorDocument.filePath);
82
  }, [editorDocument, unsavedFiles]);
83
 
84
- useEffect(() => {
85
- const unsubscribeFromEventEmitter = shortcutEventEmitter.on('toggleTerminal', () => {
86
- terminalToggledByShortcut.current = true;
87
- });
88
-
89
- const unsubscribeFromThemeStore = themeStore.subscribe(() => {
90
- for (const ref of Object.values(terminalRefs.current)) {
91
- ref?.reloadStyles();
92
- }
93
- });
94
-
95
- return () => {
96
- unsubscribeFromEventEmitter();
97
- unsubscribeFromThemeStore();
98
- };
99
- }, []);
100
-
101
- useEffect(() => {
102
- const { current: terminal } = terminalPanelRef;
103
-
104
- if (!terminal) {
105
- return;
106
- }
107
-
108
- const isCollapsed = terminal.isCollapsed();
109
-
110
- if (!showTerminal && !isCollapsed) {
111
- terminal.collapse();
112
- } else if (showTerminal && isCollapsed) {
113
- terminal.resize(DEFAULT_TERMINAL_SIZE);
114
- }
115
-
116
- terminalToggledByShortcut.current = false;
117
- }, [showTerminal]);
118
-
119
- const addTerminal = () => {
120
- if (terminalCount < MAX_TERMINALS) {
121
- setTerminalCount(terminalCount + 1);
122
- setActiveTerminal(terminalCount);
123
- }
124
- };
125
-
126
  return (
127
  <PanelGroup direction="vertical">
128
  <Panel defaultSize={showTerminal ? DEFAULT_EDITOR_SIZE : 100} minSize={20}>
@@ -181,118 +126,7 @@ export const EditorPanel = memo(
181
  </PanelGroup>
182
  </Panel>
183
  <PanelResizeHandle />
184
- <Panel
185
- ref={terminalPanelRef}
186
- defaultSize={showTerminal ? DEFAULT_TERMINAL_SIZE : 0}
187
- minSize={10}
188
- collapsible
189
- onExpand={() => {
190
- if (!terminalToggledByShortcut.current) {
191
- workbenchStore.toggleTerminal(true);
192
- }
193
- }}
194
- onCollapse={() => {
195
- if (!terminalToggledByShortcut.current) {
196
- workbenchStore.toggleTerminal(false);
197
- }
198
- }}
199
- >
200
- <div className="h-full">
201
- <div className="bg-bolt-elements-terminals-background h-full flex flex-col">
202
- <div className="flex items-center bg-bolt-elements-background-depth-2 border-y border-bolt-elements-borderColor gap-1.5 min-h-[34px] p-2">
203
- {Array.from({ length: terminalCount + 1 }, (_, index) => {
204
- const isActive = activeTerminal === index;
205
-
206
- return (
207
- <React.Fragment key={index}>
208
- {index == 0 ? (
209
- <button
210
- key={index}
211
- className={classNames(
212
- 'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
213
- {
214
- 'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary':
215
- isActive,
216
- 'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
217
- !isActive,
218
- },
219
- )}
220
- onClick={() => setActiveTerminal(index)}
221
- >
222
- <div className="i-ph:terminal-window-duotone text-lg" />
223
- Bolt Terminal
224
- </button>
225
- ) : (
226
- <React.Fragment>
227
- <button
228
- key={index}
229
- className={classNames(
230
- 'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
231
- {
232
- 'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textPrimary': isActive,
233
- 'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
234
- !isActive,
235
- },
236
- )}
237
- onClick={() => setActiveTerminal(index)}
238
- >
239
- <div className="i-ph:terminal-window-duotone text-lg" />
240
- Terminal {terminalCount > 1 && index}
241
- </button>
242
- </React.Fragment>
243
- )}
244
- </React.Fragment>
245
- );
246
- })}
247
- {terminalCount < MAX_TERMINALS && <IconButton icon="i-ph:plus" size="md" onClick={addTerminal} />}
248
- <IconButton
249
- className="ml-auto"
250
- icon="i-ph:caret-down"
251
- title="Close"
252
- size="md"
253
- onClick={() => workbenchStore.toggleTerminal(false)}
254
- />
255
- </div>
256
- {Array.from({ length: terminalCount + 1 }, (_, index) => {
257
- const isActive = activeTerminal === index;
258
-
259
- if (index == 0) {
260
- logger.info('Starting bolt terminal');
261
-
262
- return (
263
- <Terminal
264
- key={index}
265
- className={classNames('h-full overflow-hidden', {
266
- hidden: !isActive,
267
- })}
268
- ref={(ref) => {
269
- terminalRefs.current.push(ref);
270
- }}
271
- onTerminalReady={(terminal) => workbenchStore.attachBoltTerminal(terminal)}
272
- onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)}
273
- theme={theme}
274
- />
275
- );
276
- }
277
-
278
- return (
279
- <Terminal
280
- key={index}
281
- className={classNames('h-full overflow-hidden', {
282
- hidden: !isActive,
283
- })}
284
- ref={(ref) => {
285
- terminalRefs.current.push(ref);
286
- }}
287
- onTerminalReady={(terminal) => workbenchStore.attachTerminal(terminal)}
288
- onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)}
289
- theme={theme}
290
- />
291
- );
292
- })}
293
- </div>
294
- </div>
295
- </Panel>
296
  </PanelGroup>
297
  );
298
  },
 
1
  import { useStore } from '@nanostores/react';
2
+ import { memo, useMemo } from 'react';
3
+ import { Panel, PanelGroup, PanelResizeHandle } from 'react-resizable-panels';
4
  import {
5
  CodeMirrorEditor,
6
  type EditorDocument,
 
9
  type OnSaveCallback as OnEditorSave,
10
  type OnScrollCallback as OnEditorScroll,
11
  } from '~/components/editor/codemirror/CodeMirrorEditor';
 
12
  import { PanelHeader } from '~/components/ui/PanelHeader';
13
  import { PanelHeaderButton } from '~/components/ui/PanelHeaderButton';
 
14
  import type { FileMap } from '~/lib/stores/files';
15
  import { themeStore } from '~/lib/stores/theme';
 
 
16
  import { WORK_DIR } from '~/utils/constants';
17
+ import { renderLogger } from '~/utils/logger';
18
  import { isMobile } from '~/utils/mobile';
19
  import { FileBreadcrumb } from './FileBreadcrumb';
20
  import { FileTree } from './FileTree';
21
+ import { DEFAULT_TERMINAL_SIZE, TerminalTabs } from './terminal/TerminalTabs';
22
+ import { workbenchStore } from '~/lib/stores/workbench';
23
 
24
  interface EditorPanelProps {
25
  files?: FileMap;
 
34
  onFileReset?: () => void;
35
  }
36
 
 
 
37
  const DEFAULT_EDITOR_SIZE = 100 - DEFAULT_TERMINAL_SIZE;
38
 
39
  const editorSettings: EditorSettings = { tabSize: 2 };
 
56
  const theme = useStore(themeStore);
57
  const showTerminal = useStore(workbenchStore.showTerminal);
58
 
 
 
 
 
 
 
 
59
  const activeFileSegments = useMemo(() => {
60
  if (!editorDocument) {
61
  return undefined;
 
68
  return editorDocument !== undefined && unsavedFiles?.has(editorDocument.filePath);
69
  }, [editorDocument, unsavedFiles]);
70
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
  return (
72
  <PanelGroup direction="vertical">
73
  <Panel defaultSize={showTerminal ? DEFAULT_EDITOR_SIZE : 100} minSize={20}>
 
126
  </PanelGroup>
127
  </Panel>
128
  <PanelResizeHandle />
129
+ <TerminalTabs />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
130
  </PanelGroup>
131
  );
132
  },
app/components/workbench/terminal/Terminal.tsx CHANGED
@@ -16,71 +16,74 @@ export interface TerminalProps {
16
  className?: string;
17
  theme: Theme;
18
  readonly?: boolean;
 
19
  onTerminalReady?: (terminal: XTerm) => void;
20
  onTerminalResize?: (cols: number, rows: number) => void;
21
  }
22
 
23
  export const Terminal = memo(
24
- forwardRef<TerminalRef, TerminalProps>(({ className, theme, readonly, onTerminalReady, onTerminalResize }, ref) => {
25
- const terminalElementRef = useRef<HTMLDivElement>(null);
26
- const terminalRef = useRef<XTerm>();
27
-
28
- useEffect(() => {
29
- const element = terminalElementRef.current!;
30
-
31
- const fitAddon = new FitAddon();
32
- const webLinksAddon = new WebLinksAddon();
33
-
34
- const terminal = new XTerm({
35
- cursorBlink: true,
36
- convertEol: true,
37
- disableStdin: readonly,
38
- theme: getTerminalTheme(readonly ? { cursor: '#00000000' } : {}),
39
- fontSize: 12,
40
- fontFamily: 'Menlo, courier-new, courier, monospace',
41
- });
42
-
43
- terminalRef.current = terminal;
44
-
45
- terminal.loadAddon(fitAddon);
46
- terminal.loadAddon(webLinksAddon);
47
- terminal.open(element);
48
-
49
- const resizeObserver = new ResizeObserver(() => {
50
- fitAddon.fit();
51
- onTerminalResize?.(terminal.cols, terminal.rows);
52
- });
53
-
54
- resizeObserver.observe(element);
55
-
56
- logger.info('Attach terminal');
57
-
58
- onTerminalReady?.(terminal);
59
-
60
- return () => {
61
- resizeObserver.disconnect();
62
- terminal.dispose();
63
- };
64
- }, []);
65
-
66
- useEffect(() => {
67
- const terminal = terminalRef.current!;
68
-
69
- // we render a transparent cursor in case the terminal is readonly
70
- terminal.options.theme = getTerminalTheme(readonly ? { cursor: '#00000000' } : {});
71
-
72
- terminal.options.disableStdin = readonly;
73
- }, [theme, readonly]);
74
-
75
- useImperativeHandle(ref, () => {
76
- return {
77
- reloadStyles: () => {
78
- const terminal = terminalRef.current!;
79
- terminal.options.theme = getTerminalTheme(readonly ? { cursor: '#00000000' } : {});
80
- },
81
- };
82
- }, []);
83
-
84
- return <div className={className} ref={terminalElementRef} />;
85
- }),
 
 
86
  );
 
16
  className?: string;
17
  theme: Theme;
18
  readonly?: boolean;
19
+ id: string;
20
  onTerminalReady?: (terminal: XTerm) => void;
21
  onTerminalResize?: (cols: number, rows: number) => void;
22
  }
23
 
24
  export const Terminal = memo(
25
+ forwardRef<TerminalRef, TerminalProps>(
26
+ ({ className, theme, readonly, id, onTerminalReady, onTerminalResize }, ref) => {
27
+ const terminalElementRef = useRef<HTMLDivElement>(null);
28
+ const terminalRef = useRef<XTerm>();
29
+
30
+ useEffect(() => {
31
+ const element = terminalElementRef.current!;
32
+
33
+ const fitAddon = new FitAddon();
34
+ const webLinksAddon = new WebLinksAddon();
35
+
36
+ const terminal = new XTerm({
37
+ cursorBlink: true,
38
+ convertEol: true,
39
+ disableStdin: readonly,
40
+ theme: getTerminalTheme(readonly ? { cursor: '#00000000' } : {}),
41
+ fontSize: 12,
42
+ fontFamily: 'Menlo, courier-new, courier, monospace',
43
+ });
44
+
45
+ terminalRef.current = terminal;
46
+
47
+ terminal.loadAddon(fitAddon);
48
+ terminal.loadAddon(webLinksAddon);
49
+ terminal.open(element);
50
+
51
+ const resizeObserver = new ResizeObserver(() => {
52
+ fitAddon.fit();
53
+ onTerminalResize?.(terminal.cols, terminal.rows);
54
+ });
55
+
56
+ resizeObserver.observe(element);
57
+
58
+ logger.debug(`Attach [${id}]`);
59
+
60
+ onTerminalReady?.(terminal);
61
+
62
+ return () => {
63
+ resizeObserver.disconnect();
64
+ terminal.dispose();
65
+ };
66
+ }, []);
67
+
68
+ useEffect(() => {
69
+ const terminal = terminalRef.current!;
70
+
71
+ // we render a transparent cursor in case the terminal is readonly
72
+ terminal.options.theme = getTerminalTheme(readonly ? { cursor: '#00000000' } : {});
73
+
74
+ terminal.options.disableStdin = readonly;
75
+ }, [theme, readonly]);
76
+
77
+ useImperativeHandle(ref, () => {
78
+ return {
79
+ reloadStyles: () => {
80
+ const terminal = terminalRef.current!;
81
+ terminal.options.theme = getTerminalTheme(readonly ? { cursor: '#00000000' } : {});
82
+ },
83
+ };
84
+ }, []);
85
+
86
+ return <div className={className} ref={terminalElementRef} />;
87
+ },
88
+ ),
89
  );
app/components/workbench/terminal/TerminalTabs.tsx ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { useStore } from '@nanostores/react';
2
+ import React, { memo, useEffect, useRef, useState } from 'react';
3
+ import { Panel, type ImperativePanelHandle } from 'react-resizable-panels';
4
+ import { IconButton } from '~/components/ui/IconButton';
5
+ import { shortcutEventEmitter } from '~/lib/hooks';
6
+ import { themeStore } from '~/lib/stores/theme';
7
+ import { workbenchStore } from '~/lib/stores/workbench';
8
+ import { classNames } from '~/utils/classNames';
9
+ import { Terminal, type TerminalRef } from './Terminal';
10
+ import { createScopedLogger } from '~/utils/logger';
11
+
12
+ const logger = createScopedLogger('Terminal');
13
+
14
+ const MAX_TERMINALS = 3;
15
+ export const DEFAULT_TERMINAL_SIZE = 25;
16
+
17
+ export const TerminalTabs = memo(() => {
18
+ const showTerminal = useStore(workbenchStore.showTerminal);
19
+ const theme = useStore(themeStore);
20
+
21
+ const terminalRefs = useRef<Array<TerminalRef | null>>([]);
22
+ const terminalPanelRef = useRef<ImperativePanelHandle>(null);
23
+ const terminalToggledByShortcut = useRef(false);
24
+
25
+ const [activeTerminal, setActiveTerminal] = useState(0);
26
+ const [terminalCount, setTerminalCount] = useState(1);
27
+
28
+ const addTerminal = () => {
29
+ if (terminalCount < MAX_TERMINALS) {
30
+ setTerminalCount(terminalCount + 1);
31
+ setActiveTerminal(terminalCount);
32
+ }
33
+ };
34
+
35
+ useEffect(() => {
36
+ const { current: terminal } = terminalPanelRef;
37
+
38
+ if (!terminal) {
39
+ return;
40
+ }
41
+
42
+ const isCollapsed = terminal.isCollapsed();
43
+
44
+ if (!showTerminal && !isCollapsed) {
45
+ terminal.collapse();
46
+ } else if (showTerminal && isCollapsed) {
47
+ terminal.resize(DEFAULT_TERMINAL_SIZE);
48
+ }
49
+
50
+ terminalToggledByShortcut.current = false;
51
+ }, [showTerminal]);
52
+
53
+ useEffect(() => {
54
+ const unsubscribeFromEventEmitter = shortcutEventEmitter.on('toggleTerminal', () => {
55
+ terminalToggledByShortcut.current = true;
56
+ });
57
+
58
+ const unsubscribeFromThemeStore = themeStore.subscribe(() => {
59
+ for (const ref of Object.values(terminalRefs.current)) {
60
+ ref?.reloadStyles();
61
+ }
62
+ });
63
+
64
+ return () => {
65
+ unsubscribeFromEventEmitter();
66
+ unsubscribeFromThemeStore();
67
+ };
68
+ }, []);
69
+
70
+ return (
71
+ <Panel
72
+ ref={terminalPanelRef}
73
+ defaultSize={showTerminal ? DEFAULT_TERMINAL_SIZE : 0}
74
+ minSize={10}
75
+ collapsible
76
+ onExpand={() => {
77
+ if (!terminalToggledByShortcut.current) {
78
+ workbenchStore.toggleTerminal(true);
79
+ }
80
+ }}
81
+ onCollapse={() => {
82
+ if (!terminalToggledByShortcut.current) {
83
+ workbenchStore.toggleTerminal(false);
84
+ }
85
+ }}
86
+ >
87
+ <div className="h-full">
88
+ <div className="bg-bolt-elements-terminals-background h-full flex flex-col">
89
+ <div className="flex items-center bg-bolt-elements-background-depth-2 border-y border-bolt-elements-borderColor gap-1.5 min-h-[34px] p-2">
90
+ {Array.from({ length: terminalCount + 1 }, (_, index) => {
91
+ const isActive = activeTerminal === index;
92
+
93
+ return (
94
+ <React.Fragment key={index}>
95
+ {index == 0 ? (
96
+ <button
97
+ key={index}
98
+ className={classNames(
99
+ 'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
100
+ {
101
+ 'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary':
102
+ isActive,
103
+ 'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
104
+ !isActive,
105
+ },
106
+ )}
107
+ onClick={() => setActiveTerminal(index)}
108
+ >
109
+ <div className="i-ph:terminal-window-duotone text-lg" />
110
+ Bolt Terminal
111
+ </button>
112
+ ) : (
113
+ <React.Fragment>
114
+ <button
115
+ key={index}
116
+ className={classNames(
117
+ 'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
118
+ {
119
+ 'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textPrimary': isActive,
120
+ 'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
121
+ !isActive,
122
+ },
123
+ )}
124
+ onClick={() => setActiveTerminal(index)}
125
+ >
126
+ <div className="i-ph:terminal-window-duotone text-lg" />
127
+ Terminal {terminalCount > 1 && index}
128
+ </button>
129
+ </React.Fragment>
130
+ )}
131
+ </React.Fragment>
132
+ );
133
+ })}
134
+ {terminalCount < MAX_TERMINALS && <IconButton icon="i-ph:plus" size="md" onClick={addTerminal} />}
135
+ <IconButton
136
+ className="ml-auto"
137
+ icon="i-ph:caret-down"
138
+ title="Close"
139
+ size="md"
140
+ onClick={() => workbenchStore.toggleTerminal(false)}
141
+ />
142
+ </div>
143
+ {Array.from({ length: terminalCount + 1 }, (_, index) => {
144
+ const isActive = activeTerminal === index;
145
+
146
+ logger.debug(`Starting bolt terminal [${index}]`);
147
+
148
+ if (index == 0) {
149
+ return (
150
+ <Terminal
151
+ key={index}
152
+ id={`terminal_${index}`}
153
+ className={classNames('h-full overflow-hidden', {
154
+ hidden: !isActive,
155
+ })}
156
+ ref={(ref) => {
157
+ terminalRefs.current.push(ref);
158
+ }}
159
+ onTerminalReady={(terminal) => workbenchStore.attachBoltTerminal(terminal)}
160
+ onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)}
161
+ theme={theme}
162
+ />
163
+ );
164
+ } else {
165
+ return (
166
+ <Terminal
167
+ key={index}
168
+ id={`terminal_${index}`}
169
+ className={classNames('h-full overflow-hidden', {
170
+ hidden: !isActive,
171
+ })}
172
+ ref={(ref) => {
173
+ terminalRefs.current.push(ref);
174
+ }}
175
+ onTerminalReady={(terminal) => workbenchStore.attachTerminal(terminal)}
176
+ onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)}
177
+ theme={theme}
178
+ />
179
+ );
180
+ }
181
+ })}
182
+ </div>
183
+ </div>
184
+ </Panel>
185
+ );
186
+ });
app/entry.server.tsx CHANGED
@@ -43,7 +43,7 @@ export default async function handleRequest(
43
  .read()
44
  .then(({ done, value }) => {
45
  if (done) {
46
- controller.enqueue(new Uint8Array(new TextEncoder().encode(`</div></body></html>`)));
47
  controller.close();
48
 
49
  return;
 
43
  .read()
44
  .then(({ done, value }) => {
45
  if (done) {
46
+ controller.enqueue(new Uint8Array(new TextEncoder().encode('</div></body></html>')));
47
  controller.close();
48
 
49
  return;
app/lib/.server/llm/api-key.ts CHANGED
@@ -35,6 +35,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
35
  return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
36
  case 'OpenAILike':
37
  return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
 
 
38
  case 'xAI':
39
  return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
40
  case 'Cohere':
@@ -48,6 +50,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
48
 
49
  export function getBaseURL(cloudflareEnv: Env, provider: string) {
50
  switch (provider) {
 
 
51
  case 'OpenAILike':
52
  return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
53
  case 'LMStudio':
 
35
  return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
36
  case 'OpenAILike':
37
  return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
38
+ case 'Together':
39
+ return env.TOGETHER_API_KEY || cloudflareEnv.TOGETHER_API_KEY;
40
  case 'xAI':
41
  return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
42
  case 'Cohere':
 
50
 
51
  export function getBaseURL(cloudflareEnv: Env, provider: string) {
52
  switch (provider) {
53
+ case 'Together':
54
+ return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL;
55
  case 'OpenAILike':
56
  return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
57
  case 'LMStudio':
app/lib/.server/llm/model.ts CHANGED
@@ -146,6 +146,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
146
  return getGoogleModel(apiKey, model);
147
  case 'OpenAILike':
148
  return getOpenAILikeModel(baseURL, apiKey, model);
 
 
149
  case 'Deepseek':
150
  return getDeepseekModel(apiKey, model);
151
  case 'Mistral':
 
146
  return getGoogleModel(apiKey, model);
147
  case 'OpenAILike':
148
  return getOpenAILikeModel(baseURL, apiKey, model);
149
+ case 'Together':
150
+ return getOpenAILikeModel(baseURL, apiKey, model);
151
  case 'Deepseek':
152
  return getDeepseekModel(apiKey, model);
153
  case 'Mistral':
app/lib/persistence/db.ts CHANGED
@@ -6,6 +6,11 @@ const logger = createScopedLogger('ChatHistory');
6
 
7
  // this is used at the top level and never rejects
8
  export async function openDatabase(): Promise<IDBDatabase | undefined> {
 
 
 
 
 
9
  return new Promise((resolve) => {
10
  const request = indexedDB.open('boltHistory', 1);
11
 
 
6
 
7
  // this is used at the top level and never rejects
8
  export async function openDatabase(): Promise<IDBDatabase | undefined> {
9
+ if (typeof indexedDB === 'undefined') {
10
+ console.error('indexedDB is not available in this environment.');
11
+ return undefined;
12
+ }
13
+
14
  return new Promise((resolve) => {
15
  const request = indexedDB.open('boltHistory', 1);
16
 
app/lib/persistence/useChatHistory.ts CHANGED
@@ -43,7 +43,7 @@ export function useChatHistory() {
43
  setReady(true);
44
 
45
  if (persistenceEnabled) {
46
- toast.error(`Chat persistence is unavailable`);
47
  }
48
 
49
  return;
@@ -63,7 +63,7 @@ export function useChatHistory() {
63
  description.set(storedMessages.description);
64
  chatId.set(storedMessages.id);
65
  } else {
66
- navigate(`/`, { replace: true });
67
  }
68
 
69
  setReady(true);
 
43
  setReady(true);
44
 
45
  if (persistenceEnabled) {
46
+ toast.error('Chat persistence is unavailable');
47
  }
48
 
49
  return;
 
63
  description.set(storedMessages.description);
64
  chatId.set(storedMessages.id);
65
  } else {
66
+ navigate('/', { replace: true });
67
  }
68
 
69
  setReady(true);
app/lib/runtime/action-runner.ts CHANGED
@@ -84,11 +84,11 @@ export class ActionRunner {
84
  }
85
 
86
  if (action.executed) {
87
- return;
88
  }
89
 
90
  if (isStreaming && action.type !== 'file') {
91
- return;
92
  }
93
 
94
  this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
@@ -100,7 +100,6 @@ export class ActionRunner {
100
  .catch((error) => {
101
  console.error('Action failed:', error);
102
  });
103
- return this.#currentExecutionPromise;
104
  }
105
 
106
  async #executeAction(actionId: string, isStreaming: boolean = false) {
 
84
  }
85
 
86
  if (action.executed) {
87
+ return; // action already executed; nothing further to do
88
  }
89
 
90
  if (isStreaming && action.type !== 'file') {
91
+ return; // while streaming, only file actions are executed eagerly
92
  }
93
 
94
  this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
 
100
  .catch((error) => {
101
  console.error('Action failed:', error);
102
  });
 
103
  }
104
 
105
  async #executeAction(actionId: string, isStreaming: boolean = false) {
app/lib/stores/workbench.ts CHANGED
@@ -14,6 +14,7 @@ import { saveAs } from 'file-saver';
14
  import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
15
  import * as nodePath from 'node:path';
16
  import { extractRelativePath } from '~/utils/diff';
 
17
 
18
  export interface ArtifactState {
19
  id: string;
@@ -330,6 +331,13 @@ export class WorkbenchStore {
330
  const zip = new JSZip();
331
  const files = this.files.get();
332
 
 
 
 
 
 
 
 
333
  for (const [filePath, dirent] of Object.entries(files)) {
334
  if (dirent?.type === 'file' && !dirent.isBinary) {
335
  const relativePath = extractRelativePath(filePath);
@@ -352,8 +360,9 @@ export class WorkbenchStore {
352
  }
353
  }
354
 
 
355
  const content = await zip.generateAsync({ type: 'blob' });
356
- saveAs(content, 'project.zip');
357
  }
358
 
359
  async syncFiles(targetHandle: FileSystemDirectoryHandle) {
@@ -371,7 +380,9 @@ export class WorkbenchStore {
371
  }
372
 
373
  // create or get the file
374
- const fileHandle = await currentHandle.getFileHandle(pathSegments[pathSegments.length - 1], { create: true });
 
 
375
 
376
  // write the file content
377
  const writable = await fileHandle.createWritable();
 
14
  import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
15
  import * as nodePath from 'node:path';
16
  import { extractRelativePath } from '~/utils/diff';
17
+ import { description } from '~/lib/persistence';
18
 
19
  export interface ArtifactState {
20
  id: string;
 
331
  const zip = new JSZip();
332
  const files = this.files.get();
333
 
334
+ // Get the project name from the description input, or use a default name
335
+ const projectName = (description.value ?? 'project').toLocaleLowerCase().split(' ').join('_');
336
+
337
+ // Generate a simple 6-character hash based on the current timestamp
338
+ const timestampHash = Date.now().toString(36).slice(-6);
339
+ const uniqueProjectName = `${projectName}_${timestampHash}`;
340
+
341
  for (const [filePath, dirent] of Object.entries(files)) {
342
  if (dirent?.type === 'file' && !dirent.isBinary) {
343
  const relativePath = extractRelativePath(filePath);
 
360
  }
361
  }
362
 
363
+ // Generate the zip file and save it
364
  const content = await zip.generateAsync({ type: 'blob' });
365
+ saveAs(content, `${uniqueProjectName}.zip`);
366
  }
367
 
368
  async syncFiles(targetHandle: FileSystemDirectoryHandle) {
 
380
  }
381
 
382
  // create or get the file
383
+ const fileHandle = await currentHandle.getFileHandle(pathSegments[pathSegments.length - 1], {
384
+ create: true,
385
+ });
386
 
387
  // write the file content
388
  const writable = await fileHandle.createWritable();
app/routes/api.enhancer.ts CHANGED
@@ -44,9 +44,25 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
44
  content:
45
  `[Model: ${model}]\n\n[Provider: ${providerName}]\n\n` +
46
  stripIndents`
 
 
47
  I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.
48
 
49
- IMPORTANT: Only respond with the improved prompt and nothing else!
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
  <original_prompt>
52
  ${message}
@@ -79,7 +95,7 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
79
  },
80
  });
81
 
82
- const transformedStream = result.toAIStream().pipeThrough(transformStream);
83
 
84
  return new StreamingTextResponse(transformedStream);
85
  } catch (error: unknown) {
 
44
  content:
45
  `[Model: ${model}]\n\n[Provider: ${providerName}]\n\n` +
46
  stripIndents`
47
+ You are a professional prompt engineer specializing in crafting precise, effective prompts.
48
+ Your task is to enhance prompts by making them more specific, actionable, and effective.
49
  I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.
50
 
51
+ For valid prompts:
52
+ - Make instructions explicit and unambiguous
53
+ - Add relevant context and constraints
54
+ - Remove redundant information
55
+ - Maintain the core intent
56
+ - Ensure the prompt is self-contained
57
+ - Use professional language
58
+ For invalid or unclear prompts:
59
+ - Respond with a clear, professional guidance message
60
+ - Keep responses concise and actionable
61
+ - Maintain a helpful, constructive tone
62
+ - Focus on what the user should provide
63
+ - Use a standard template for consistency
64
+ IMPORTANT: Your response must ONLY contain the enhanced prompt text.
65
+ Do not include any explanations, metadata, or wrapper tags.
66
 
67
  <original_prompt>
68
  ${message}
 
95
  },
96
  });
97
 
98
+ const transformedStream = result.toDataStream().pipeThrough(transformStream);
99
 
100
  return new StreamingTextResponse(transformedStream);
101
  } catch (error: unknown) {
app/styles/components/resize-handle.scss CHANGED
@@ -1,3 +1,5 @@
 
 
1
  [data-resize-handle] {
2
  position: relative;
3
 
@@ -8,7 +10,7 @@
8
  bottom: 0;
9
  left: -6px;
10
  right: -5px;
11
- z-index: $zIndexMax;
12
  }
13
 
14
  &[data-panel-group-direction='vertical']:after {
@@ -18,7 +20,7 @@
18
  right: 0;
19
  top: -5px;
20
  bottom: -6px;
21
- z-index: $zIndexMax;
22
  }
23
 
24
  &[data-resize-handle-state='hover']:after,
 
1
+ @use '../z-index';
2
+
3
  [data-resize-handle] {
4
  position: relative;
5
 
 
10
  bottom: 0;
11
  left: -6px;
12
  right: -5px;
13
+ z-index: z-index.$zIndexMax;
14
  }
15
 
16
  &[data-panel-group-direction='vertical']:after {
 
20
  right: 0;
21
  top: -5px;
22
  bottom: -6px;
23
+ z-index: z-index.$zIndexMax;
24
  }
25
 
26
  &[data-resize-handle-state='hover']:after,
app/styles/index.scss CHANGED
@@ -1,11 +1,11 @@
1
- @import './variables.scss';
2
- @import './z-index.scss';
3
- @import './animations.scss';
4
- @import './components/terminal.scss';
5
- @import './components/resize-handle.scss';
6
- @import './components/code.scss';
7
- @import './components/editor.scss';
8
- @import './components/toast.scss';
9
 
10
  html,
11
  body {
 
1
+ @use 'variables.scss';
2
+ @use 'z-index.scss';
3
+ @use 'animations.scss';
4
+ @use 'components/terminal.scss';
5
+ @use 'components/resize-handle.scss';
6
+ @use 'components/code.scss';
7
+ @use 'components/editor.scss';
8
+ @use 'components/toast.scss';
9
 
10
  html,
11
  body {
app/utils/constants.ts CHANGED
@@ -7,6 +7,7 @@ export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
7
  export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
8
  export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
9
  export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
 
10
 
11
  const PROVIDER_LIST: ProviderInfo[] = [
12
  {
@@ -259,6 +260,31 @@ const PROVIDER_LIST: ProviderInfo[] = [
259
  labelForGetApiKey: 'Get LMStudio',
260
  icon: 'i-ph:cloud-arrow-down',
261
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
262
  ];
263
 
264
  export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
@@ -283,9 +309,11 @@ const getOllamaBaseUrl = () => {
283
  };
284
 
285
  async function getOllamaModels(): Promise<ModelInfo[]> {
286
- //if (typeof window === 'undefined') {
287
- //return [];
288
- //}
 
 
289
 
290
  try {
291
  const baseUrl = getOllamaBaseUrl();
 
7
  export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
8
  export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
9
  export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
10
+ export const PROMPT_COOKIE_KEY = 'cachedPrompt';
11
 
12
  const PROVIDER_LIST: ProviderInfo[] = [
13
  {
 
260
  labelForGetApiKey: 'Get LMStudio',
261
  icon: 'i-ph:cloud-arrow-down',
262
  },
263
+ {
264
+ name: 'Together',
265
+ staticModels: [
266
+ {
267
+ name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
268
+ label: 'Qwen/Qwen2.5-Coder-32B-Instruct',
269
+ provider: 'Together',
270
+ maxTokenAllowed: 8000,
271
+ },
272
+ {
273
+ name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
274
+ label: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
275
+ provider: 'Together',
276
+ maxTokenAllowed: 8000,
277
+ },
278
+
279
+ {
280
+ name: 'mistralai/Mixtral-8x7B-Instruct-v0.1',
281
+ label: 'Mixtral 8x7B Instruct',
282
+ provider: 'Together',
283
+ maxTokenAllowed: 8192,
284
+ },
285
+ ],
286
+ getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
287
+ },
288
  ];
289
 
290
  export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
 
309
  };
310
 
311
  async function getOllamaModels(): Promise<ModelInfo[]> {
312
+ /*
313
+ * if (typeof window === 'undefined') {
314
+ * return [];
315
+ * }
316
+ */
317
 
318
  try {
319
  const baseUrl = getOllamaBaseUrl();
app/utils/logger.ts CHANGED
@@ -11,7 +11,7 @@ interface Logger {
11
  setLevel: (level: DebugLevel) => void;
12
  }
13
 
14
- let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info';
15
 
16
  const isWorker = 'HTMLRewriter' in globalThis;
17
  const supportsColor = !isWorker;
 
11
  setLevel: (level: DebugLevel) => void;
12
  }
13
 
14
+ let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';
15
 
16
  const isWorker = 'HTMLRewriter' in globalThis;
17
  const supportsColor = !isWorker;
docker-compose.yaml CHANGED
@@ -1,5 +1,5 @@
1
  services:
2
- bolt-ai:
3
  image: bolt-ai:production
4
  build:
5
  context: .
@@ -11,7 +11,7 @@ services:
11
  environment:
12
  - NODE_ENV=production
13
  - COMPOSE_PROFILES=production
14
- # No strictly neded but serving as hints for Coolify
15
  - PORT=5173
16
  - GROQ_API_KEY=${GROQ_API_KEY}
17
  - HuggingFace_API_KEY=${HuggingFace_API_KEY}
@@ -20,16 +20,18 @@ services:
20
  - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
21
  - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
22
  - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
 
 
23
  - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
24
  - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
25
  - RUNNING_IN_DOCKER=true
26
  extra_hosts:
27
- - "host.docker.internal:host-gateway"
28
  command: pnpm run dockerstart
29
  profiles:
30
- - production # This service only runs in the production profile
31
 
32
- bolt-ai-dev:
33
  image: bolt-ai:development
34
  build:
35
  target: bolt-ai-development
@@ -39,7 +41,7 @@ services:
39
  - VITE_HMR_HOST=localhost
40
  - VITE_HMR_PORT=5173
41
  - CHOKIDAR_USEPOLLING=true
42
- - WATCHPACK_POLLING=true
43
  - PORT=5173
44
  - GROQ_API_KEY=${GROQ_API_KEY}
45
  - HuggingFace_API_KEY=${HuggingFace_API_KEY}
@@ -48,11 +50,13 @@ services:
48
  - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
49
  - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
50
  - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
 
 
51
  - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
52
  - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
53
  - RUNNING_IN_DOCKER=true
54
  extra_hosts:
55
- - "host.docker.internal:host-gateway"
56
  volumes:
57
  - type: bind
58
  source: .
@@ -60,6 +64,6 @@ services:
60
  consistency: cached
61
  - /app/node_modules
62
  ports:
63
- - "5173:5173" # Same port, no conflict as only one runs at a time
64
  command: pnpm run dev --host 0.0.0.0
65
- profiles: ["development", "default"] # Make development the default profile
 
1
  services:
2
+ app-prod:
3
  image: bolt-ai:production
4
  build:
5
  context: .
 
11
  environment:
12
  - NODE_ENV=production
13
  - COMPOSE_PROFILES=production
14
+ # Not strictly needed, but serving as hints for Coolify
15
  - PORT=5173
16
  - GROQ_API_KEY=${GROQ_API_KEY}
17
  - HuggingFace_API_KEY=${HuggingFace_API_KEY}
 
20
  - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
21
  - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
22
  - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
23
+ - TOGETHER_API_KEY=${TOGETHER_API_KEY}
24
+ - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
25
  - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
26
  - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
27
  - RUNNING_IN_DOCKER=true
28
  extra_hosts:
29
+ - "host.docker.internal:host-gateway"
30
  command: pnpm run dockerstart
31
  profiles:
32
+ - production
33
 
34
+ app-dev:
35
  image: bolt-ai:development
36
  build:
37
  target: bolt-ai-development
 
41
  - VITE_HMR_HOST=localhost
42
  - VITE_HMR_PORT=5173
43
  - CHOKIDAR_USEPOLLING=true
44
+ - WATCHPACK_POLLING=true
45
  - PORT=5173
46
  - GROQ_API_KEY=${GROQ_API_KEY}
47
  - HuggingFace_API_KEY=${HuggingFace_API_KEY}
 
50
  - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
51
  - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
52
  - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
53
+ - TOGETHER_API_KEY=${TOGETHER_API_KEY}
54
+ - TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
55
  - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
56
  - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
57
  - RUNNING_IN_DOCKER=true
58
  extra_hosts:
59
+ - "host.docker.internal:host-gateway"
60
  volumes:
61
  - type: bind
62
  source: .
 
64
  consistency: cached
65
  - /app/node_modules
66
  ports:
67
+ - "5173:5173"
68
  command: pnpm run dev --host 0.0.0.0
69
+ profiles: ["development", "default"]
eslint.config.mjs CHANGED
@@ -4,7 +4,13 @@ import { getNamingConventionRule, tsFileExtensions } from '@blitz/eslint-plugin/
4
 
5
  export default [
6
  {
7
- ignores: ['**/dist', '**/node_modules', '**/.wrangler', '**/bolt/build', '**/.history'],
 
 
 
 
 
 
8
  },
9
  ...blitzPlugin.configs.recommended(),
10
  {
@@ -14,6 +20,15 @@ export default [
14
  '@typescript-eslint/no-empty-object-type': 'off',
15
  '@blitz/comment-syntax': 'off',
16
  '@blitz/block-scope-case': 'off',
 
 
 
 
 
 
 
 
 
17
  },
18
  },
19
  {
@@ -38,7 +53,7 @@ export default [
38
  patterns: [
39
  {
40
  group: ['../'],
41
- message: `Relative imports are not allowed. Please use '~/' instead.`,
42
  },
43
  ],
44
  },
 
4
 
5
  export default [
6
  {
7
+ ignores: [
8
+ '**/dist',
9
+ '**/node_modules',
10
+ '**/.wrangler',
11
+ '**/bolt/build',
12
+ '**/.history',
13
+ ],
14
  },
15
  ...blitzPlugin.configs.recommended(),
16
  {
 
20
  '@typescript-eslint/no-empty-object-type': 'off',
21
  '@blitz/comment-syntax': 'off',
22
  '@blitz/block-scope-case': 'off',
23
+ 'array-bracket-spacing': ["error", "never"],
24
+ 'object-curly-newline': ["error", { "consistent": true }],
25
+ 'keyword-spacing': ["error", { "before": true, "after": true }],
26
+ 'consistent-return': "error",
27
+ 'semi': ["error", "always"],
28
+ 'curly': ["error"],
29
+ 'no-eval': ["error"],
30
+ 'linebreak-style': ["error", "unix"],
31
+ 'arrow-spacing': ["error", { "before": true, "after": true }]
32
  },
33
  },
34
  {
 
53
  patterns: [
54
  {
55
  group: ['../'],
56
+ message: 'Relative imports are not allowed. Please use \'~/\' instead.',
57
  },
58
  ],
59
  },
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "name": "bolt",
3
- "description": "StackBlitz AI Agent",
4
  "private": true,
5
  "license": "MIT",
6
  "sideEffects": false,
@@ -13,7 +13,9 @@
13
  "test:watch": "vitest",
14
  "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app",
15
  "lint:fix": "npm run lint -- --fix && prettier app --write",
16
- "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
 
 
17
  "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
18
  "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",
19
  "dockerbuild:prod": "docker build -t bolt-ai:production -t bolt-ai:latest --target bolt-ai-production .",
@@ -28,95 +30,96 @@
28
  },
29
  "dependencies": {
30
  "@ai-sdk/anthropic": "^0.0.39",
31
- "@ai-sdk/cohere": "^1.0.1",
32
  "@ai-sdk/google": "^0.0.52",
33
  "@ai-sdk/mistral": "^0.0.43",
34
  "@ai-sdk/openai": "^0.0.66",
35
- "@codemirror/autocomplete": "^6.17.0",
36
- "@codemirror/commands": "^6.6.0",
37
  "@codemirror/lang-cpp": "^6.0.2",
38
- "@codemirror/lang-css": "^6.2.1",
39
  "@codemirror/lang-html": "^6.4.9",
40
  "@codemirror/lang-javascript": "^6.2.2",
41
  "@codemirror/lang-json": "^6.0.1",
42
- "@codemirror/lang-markdown": "^6.2.5",
43
  "@codemirror/lang-python": "^6.1.6",
44
  "@codemirror/lang-sass": "^6.0.2",
45
  "@codemirror/lang-wast": "^6.0.2",
46
- "@codemirror/language": "^6.10.2",
47
- "@codemirror/search": "^6.5.6",
48
  "@codemirror/state": "^6.4.1",
49
- "@codemirror/view": "^6.28.4",
50
- "@iconify-json/ph": "^1.1.13",
51
- "@iconify-json/svg-spinners": "^1.1.2",
52
- "@lezer/highlight": "^1.2.0",
53
- "@nanostores/react": "^0.7.2",
54
  "@octokit/rest": "^21.0.2",
55
- "@octokit/types": "^13.6.1",
56
  "@openrouter/ai-sdk-provider": "^0.0.5",
57
- "@radix-ui/react-dialog": "^1.1.1",
58
- "@radix-ui/react-dropdown-menu": "^2.1.1",
59
  "@radix-ui/react-tooltip": "^1.1.4",
60
- "@remix-run/cloudflare": "^2.10.2",
61
- "@remix-run/cloudflare-pages": "^2.10.2",
62
- "@remix-run/react": "^2.10.2",
63
- "@uiw/codemirror-theme-vscode": "^4.23.0",
64
- "@unocss/reset": "^0.61.0",
65
  "@webcontainer/api": "1.3.0-internal.10",
66
  "@xterm/addon-fit": "^0.10.0",
67
  "@xterm/addon-web-links": "^0.11.0",
68
  "@xterm/xterm": "^5.5.0",
69
- "ai": "^3.4.9",
70
  "date-fns": "^3.6.0",
71
  "diff": "^5.2.0",
72
  "file-saver": "^2.0.5",
73
- "framer-motion": "^11.2.12",
74
  "ignore": "^6.0.2",
75
- "isbot": "^4.1.0",
76
  "istextorbinary": "^9.5.0",
77
- "jose": "^5.6.3",
78
  "js-cookie": "^3.0.5",
79
  "jszip": "^3.10.1",
80
  "nanostores": "^0.10.3",
81
  "ollama-ai-provider": "^0.15.2",
82
- "react": "^18.2.0",
83
- "react-dom": "^18.2.0",
84
- "react-hotkeys-hook": "^4.5.0",
 
85
  "react-markdown": "^9.0.1",
86
- "react-resizable-panels": "^2.0.20",
87
- "react-toastify": "^10.0.5",
88
  "rehype-raw": "^7.0.0",
89
  "rehype-sanitize": "^6.0.0",
90
  "remark-gfm": "^4.0.0",
91
  "remix-island": "^0.2.0",
92
- "remix-utils": "^7.6.0",
93
- "shiki": "^1.9.1",
94
  "unist-util-visit": "^5.0.0"
95
  },
96
  "devDependencies": {
97
  "@blitz/eslint-plugin": "0.1.0",
98
- "@cloudflare/workers-types": "^4.20240620.0",
99
- "@remix-run/dev": "^2.10.0",
100
- "@types/diff": "^5.2.1",
101
  "@types/file-saver": "^2.0.7",
102
  "@types/js-cookie": "^3.0.6",
103
- "@types/react": "^18.2.20",
104
- "@types/react-dom": "^18.2.7",
105
  "fast-glob": "^3.3.2",
106
  "husky": "9.1.7",
107
  "is-ci": "^3.0.1",
108
  "node-fetch": "^3.3.2",
109
- "prettier": "^3.3.2",
110
- "sass-embedded": "^1.80.3",
111
- "typescript": "^5.5.2",
112
  "unified": "^11.0.5",
113
- "unocss": "^0.61.3",
114
- "vite": "^5.3.6",
115
  "vite-plugin-node-polyfills": "^0.22.0",
116
  "vite-plugin-optimize-css-modules": "^1.1.0",
117
  "vite-tsconfig-paths": "^4.3.2",
118
- "vitest": "^2.0.1",
119
- "wrangler": "^3.63.2",
120
  "zod": "^3.23.8"
121
  },
122
  "resolutions": {
 
1
  {
2
  "name": "bolt",
3
+ "description": "An AI Agent",
4
  "private": true,
5
  "license": "MIT",
6
  "sideEffects": false,
 
13
  "test:watch": "vitest",
14
  "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app",
15
  "lint:fix": "npm run lint -- --fix && prettier app --write",
16
+ "start:windows": "wrangler pages dev ./build/client",
17
+ "start:unix": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
18
+ "start": "node -e \"const { spawn } = require('child_process'); const isWindows = process.platform === 'win32'; const cmd = isWindows ? 'npm run start:windows' : 'npm run start:unix'; const child = spawn(cmd, { shell: true, stdio: 'inherit' }); child.on('exit', code => process.exit(code));\"",
19
  "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
20
  "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",
21
  "dockerbuild:prod": "docker build -t bolt-ai:production -t bolt-ai:latest --target bolt-ai-production .",
 
30
  },
31
  "dependencies": {
32
  "@ai-sdk/anthropic": "^0.0.39",
33
+ "@ai-sdk/cohere": "^1.0.3",
34
  "@ai-sdk/google": "^0.0.52",
35
  "@ai-sdk/mistral": "^0.0.43",
36
  "@ai-sdk/openai": "^0.0.66",
37
+ "@codemirror/autocomplete": "^6.18.3",
38
+ "@codemirror/commands": "^6.7.1",
39
  "@codemirror/lang-cpp": "^6.0.2",
40
+ "@codemirror/lang-css": "^6.3.1",
41
  "@codemirror/lang-html": "^6.4.9",
42
  "@codemirror/lang-javascript": "^6.2.2",
43
  "@codemirror/lang-json": "^6.0.1",
44
+ "@codemirror/lang-markdown": "^6.3.1",
45
  "@codemirror/lang-python": "^6.1.6",
46
  "@codemirror/lang-sass": "^6.0.2",
47
  "@codemirror/lang-wast": "^6.0.2",
48
+ "@codemirror/language": "^6.10.6",
49
+ "@codemirror/search": "^6.5.8",
50
  "@codemirror/state": "^6.4.1",
51
+ "@codemirror/view": "^6.35.0",
52
+ "@iconify-json/ph": "^1.2.1",
53
+ "@iconify-json/svg-spinners": "^1.2.1",
54
+ "@lezer/highlight": "^1.2.1",
55
+ "@nanostores/react": "^0.7.3",
56
  "@octokit/rest": "^21.0.2",
57
+ "@octokit/types": "^13.6.2",
58
  "@openrouter/ai-sdk-provider": "^0.0.5",
59
+ "@radix-ui/react-dialog": "^1.1.2",
60
+ "@radix-ui/react-dropdown-menu": "^2.1.2",
61
  "@radix-ui/react-tooltip": "^1.1.4",
62
+ "@remix-run/cloudflare": "^2.15.0",
63
+ "@remix-run/cloudflare-pages": "^2.15.0",
64
+ "@remix-run/react": "^2.15.0",
65
+ "@uiw/codemirror-theme-vscode": "^4.23.6",
66
+ "@unocss/reset": "^0.61.9",
67
  "@webcontainer/api": "1.3.0-internal.10",
68
  "@xterm/addon-fit": "^0.10.0",
69
  "@xterm/addon-web-links": "^0.11.0",
70
  "@xterm/xterm": "^5.5.0",
71
+ "ai": "^3.4.33",
72
  "date-fns": "^3.6.0",
73
  "diff": "^5.2.0",
74
  "file-saver": "^2.0.5",
75
+ "framer-motion": "^11.12.0",
76
  "ignore": "^6.0.2",
77
+ "isbot": "^4.4.0",
78
  "istextorbinary": "^9.5.0",
79
+ "jose": "^5.9.6",
80
  "js-cookie": "^3.0.5",
81
  "jszip": "^3.10.1",
82
  "nanostores": "^0.10.3",
83
  "ollama-ai-provider": "^0.15.2",
84
+ "pnpm": "^9.14.4",
85
+ "react": "^18.3.1",
86
+ "react-dom": "^18.3.1",
87
+ "react-hotkeys-hook": "^4.6.1",
88
  "react-markdown": "^9.0.1",
89
+ "react-resizable-panels": "^2.1.7",
90
+ "react-toastify": "^10.0.6",
91
  "rehype-raw": "^7.0.0",
92
  "rehype-sanitize": "^6.0.0",
93
  "remark-gfm": "^4.0.0",
94
  "remix-island": "^0.2.0",
95
+ "remix-utils": "^7.7.0",
96
+ "shiki": "^1.24.0",
97
  "unist-util-visit": "^5.0.0"
98
  },
99
  "devDependencies": {
100
  "@blitz/eslint-plugin": "0.1.0",
101
+ "@cloudflare/workers-types": "^4.20241127.0",
102
+ "@remix-run/dev": "^2.15.0",
103
+ "@types/diff": "^5.2.3",
104
  "@types/file-saver": "^2.0.7",
105
  "@types/js-cookie": "^3.0.6",
106
+ "@types/react": "^18.3.12",
107
+ "@types/react-dom": "^18.3.1",
108
  "fast-glob": "^3.3.2",
109
  "husky": "9.1.7",
110
  "is-ci": "^3.0.1",
111
  "node-fetch": "^3.3.2",
112
+ "prettier": "^3.4.1",
113
+ "sass-embedded": "^1.81.0",
114
+ "typescript": "^5.7.2",
115
  "unified": "^11.0.5",
116
+ "unocss": "^0.61.9",
117
+ "vite": "^5.4.11",
118
  "vite-plugin-node-polyfills": "^0.22.0",
119
  "vite-plugin-optimize-css-modules": "^1.1.0",
120
  "vite-tsconfig-paths": "^4.3.2",
121
+ "vitest": "^2.1.7",
122
+ "wrangler": "^3.91.0",
123
  "zod": "^3.23.8"
124
  },
125
  "resolutions": {
pnpm-lock.yaml CHANGED
The diff for this file is too large to render. See raw diff
 
vite.config.ts CHANGED
@@ -20,6 +20,7 @@ export default defineConfig((config) => {
20
  v3_fetcherPersist: true,
21
  v3_relativeSplatPath: true,
22
  v3_throwAbortReason: true,
 
23
  },
24
  }),
25
  UnoCSS(),
 
20
  v3_fetcherPersist: true,
21
  v3_relativeSplatPath: true,
22
  v3_throwAbortReason: true,
23
+ v3_lazyRouteDiscovery: true,
24
  },
25
  }),
26
  UnoCSS(),
worker-configuration.d.ts CHANGED
@@ -7,6 +7,8 @@ interface Env {
7
  OLLAMA_API_BASE_URL: string;
8
  OPENAI_LIKE_API_KEY: string;
9
  OPENAI_LIKE_API_BASE_URL: string;
 
 
10
  DEEPSEEK_API_KEY: string;
11
  LMSTUDIO_API_BASE_URL: string;
12
  GOOGLE_GENERATIVE_AI_API_KEY: string;
 
7
  OLLAMA_API_BASE_URL: string;
8
  OPENAI_LIKE_API_KEY: string;
9
  OPENAI_LIKE_API_BASE_URL: string;
10
+ TOGETHER_API_KEY: string;
11
+ TOGETHER_API_BASE_URL: string;
12
  DEEPSEEK_API_KEY: string;
13
  LMSTUDIO_API_BASE_URL: string;
14
  GOOGLE_GENERATIVE_AI_API_KEY: string;