Oliver Jägle committed
Commit 7d8f811 · unverified · 1 Parent(s): fe3e2eb

Fix linting issues
app/components/chat/APIKeyManager.tsx CHANGED
@@ -10,6 +10,7 @@ interface APIKeyManagerProps {
   labelForGetApiKey?: string;
 }
 
+// eslint-disable-next-line @typescript-eslint/naming-convention
 export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
   const [isEditing, setIsEditing] = useState(false);
   const [tempKey, setTempKey] = useState(apiKey);
app/components/chat/BaseChat.tsx CHANGED
@@ -9,7 +9,7 @@ import { Menu } from '~/components/sidebar/Menu.client';
 import { IconButton } from '~/components/ui/IconButton';
 import { Workbench } from '~/components/workbench/Workbench.client';
 import { classNames } from '~/utils/classNames';
-import { MODEL_LIST, DEFAULT_PROVIDER, PROVIDER_LIST, initializeModelList } from '~/utils/constants';
+import { MODEL_LIST, PROVIDER_LIST, initializeModelList } from '~/utils/constants';
 import { Messages } from './Messages.client';
 import { SendButton } from './SendButton.client';
 import { useState } from 'react';
@@ -27,22 +27,25 @@ const EXAMPLE_PROMPTS = [
   { text: 'How do I center a div?' },
 ];
 
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
 const providerList = PROVIDER_LIST;
 
+// @ts-ignore TODO: Introduce proper types
+// eslint-disable-next-line @typescript-eslint/no-unused-vars
 const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
   return (
     <div className="mb-2 flex gap-2 flex-col sm:flex-row">
       <select
         value={provider?.name}
         onChange={(e) => {
-          setProvider(providerList.find((p) => p.name === e.target.value));
+          setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
 
           const firstModel = [...modelList].find((m) => m.provider == e.target.value);
           setModel(firstModel ? firstModel.name : '');
         }}
         className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
       >
-        {providerList.map((provider) => (
+        {providerList.map((provider: ProviderInfo) => (
           <option key={provider.name} value={provider.name}>
             {provider.name}
           </option>
app/components/sidebar/Menu.client.tsx CHANGED
@@ -2,7 +2,6 @@ import { motion, type Variants } from 'framer-motion';
 import { useCallback, useEffect, useRef, useState } from 'react';
 import { toast } from 'react-toastify';
 import { Dialog, DialogButton, DialogDescription, DialogRoot, DialogTitle } from '~/components/ui/Dialog';
-import { IconButton } from '~/components/ui/IconButton';
 import { ThemeSwitch } from '~/components/ui/ThemeSwitch';
 import { db, deleteById, getAll, chatId, type ChatHistoryItem, useChatHistory } from '~/lib/persistence';
 import { cubicEasingFn } from '~/utils/easings';
app/lib/.server/llm/api-key.ts CHANGED
@@ -52,7 +52,7 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
       return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
-    case 'Ollama':
+    case 'Ollama': {
       let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
 
       if (env.RUNNING_IN_DOCKER === 'true') {
@@ -60,6 +60,7 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
       }
 
       return baseUrl;
+    }
     default:
       return '';
   }
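The braces added around the Ollama case follow the usual fix for ESLint's no-case-declarations rule: a let declared directly inside a case clause is scoped to the entire switch, so wrapping the clause in a block keeps baseUrl local to that case. A minimal TypeScript sketch of the pattern (the identifiers and the Docker fallback below are illustrative, not taken from the repository):

function resolveBaseUrl(provider: string, runningInDocker: boolean): string {
  switch (provider) {
    case 'Ollama': {
      // the block keeps this declaration from leaking into other cases
      let baseUrl = 'http://localhost:11434';

      if (runningInDocker) {
        baseUrl = 'http://host.docker.internal:11434';
      }

      return baseUrl;
    }
    default:
      return '';
  }
}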
app/lib/.server/llm/model.ts CHANGED
@@ -11,14 +11,14 @@ import { createOpenRouter } from '@openrouter/ai-sdk-provider';
 import { createMistral } from '@ai-sdk/mistral';
 import { createCohere } from '@ai-sdk/cohere';
 
-export function getAnthropicModel(apiKey: string, model: string) {
+export function getAnthropicModel(apiKey: string | undefined, model: string) {
   const anthropic = createAnthropic({
     apiKey,
   });
 
   return anthropic(model);
 }
-export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+export function getOpenAILikeModel(baseURL: string, apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     baseURL,
     apiKey,
@@ -27,7 +27,7 @@ export function getOpenAILikeModel(baseURL: string, apiKey: string, model: strin
   return openai(model);
 }
 
-export function getCohereAIModel(apiKey: string, model: string) {
+export function getCohereAIModel(apiKey: string | undefined, model: string) {
   const cohere = createCohere({
     apiKey,
   });
@@ -35,7 +35,7 @@ export function getCohereAIModel(apiKey: string, model: string) {
   return cohere(model);
 }
 
-export function getOpenAIModel(apiKey: string, model: string) {
+export function getOpenAIModel(apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     apiKey,
   });
@@ -43,7 +43,7 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getMistralModel(apiKey: string, model: string) {
+export function getMistralModel(apiKey: string | undefined, model: string) {
   const mistral = createMistral({
     apiKey,
   });
@@ -51,7 +51,7 @@ export function getMistralModel(apiKey: string, model: string) {
   return mistral(model);
 }
 
-export function getGoogleModel(apiKey: string, model: string) {
+export function getGoogleModel(apiKey: string | undefined, model: string) {
   const google = createGoogleGenerativeAI({
     apiKey,
   });
@@ -59,7 +59,7 @@ export function getGoogleModel(apiKey: string, model: string) {
   return google(model);
 }
 
-export function getGroqModel(apiKey: string, model: string) {
+export function getGroqModel(apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
     apiKey,
@@ -68,7 +68,7 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getHuggingFaceModel(apiKey: string, model: string) {
+export function getHuggingFaceModel(apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api-inference.huggingface.co/v1/',
     apiKey,
@@ -78,16 +78,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
 }
 
 export function getOllamaModel(baseURL: string, model: string) {
-  const Ollama = ollama(model, {
+  const ollamaInstance = ollama(model, {
     numCtx: 32768,
   });
 
-  Ollama.config.baseURL = `${baseURL}/api`;
+  ollamaInstance.config.baseURL = `${baseURL}/api`;
 
-  return Ollama;
+  return ollamaInstance;
 }
 
-export function getDeepseekModel(apiKey: string, model: string) {
+export function getDeepseekModel(apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.deepseek.com/beta',
     apiKey,
@@ -96,7 +96,7 @@ export function getDeepseekModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getOpenRouterModel(apiKey: string, model: string) {
+export function getOpenRouterModel(apiKey: string | undefined, model: string) {
   const openRouter = createOpenRouter({
     apiKey,
   });
@@ -113,7 +113,7 @@ export function getLMStudioModel(baseURL: string, model: string) {
   return lmstudio(model);
 }
 
-export function getXAIModel(apiKey: string, model: string) {
+export function getXAIModel(apiKey: string | undefined, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
     apiKey,
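Widening the apiKey parameters to string | undefined matches how keys typically reach these helpers: environment lookups are typed as possibly undefined, and the provider factories treat apiKey as an optional setting. A hedged sketch of such a call site (the env access and model id below are illustrative only, not from the repository):

import { getAnthropicModel } from '~/lib/.server/llm/model';

// process.env lookups are typed `string | undefined`, so the widened
// signature accepts them without a cast or non-null assertion
const apiKey: string | undefined = process.env.ANTHROPIC_API_KEY;
const model = getAnthropicModel(apiKey, 'claude-3-5-sonnet-20241022');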
app/lib/persistence/useChatHistory.ts CHANGED
@@ -110,6 +110,7 @@ export function useChatHistory() {
         toast.success('Chat duplicated successfully');
       } catch (error) {
         toast.error('Failed to duplicate chat');
+        console.log(error);
       }
     },
   };
app/lib/runtime/action-runner.ts CHANGED
@@ -1,11 +1,10 @@
-import { WebContainer, type WebContainerProcess } from '@webcontainer/api';
+import { WebContainer } from '@webcontainer/api';
 import { atom, map, type MapStore } from 'nanostores';
 import * as nodePath from 'node:path';
 import type { BoltAction } from '~/types/actions';
 import { createScopedLogger } from '~/utils/logger';
 import { unreachable } from '~/utils/unreachable';
 import type { ActionCallbackData } from './message-parser';
-import type { ITerminal } from '~/types/terminal';
 import type { BoltShell } from '~/utils/shell';
 
 const logger = createScopedLogger('ActionRunner');
@@ -94,9 +93,10 @@ export class ActionRunner {
 
     this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
 
+    // eslint-disable-next-line consistent-return
     return (this.#currentExecutionPromise = this.#currentExecutionPromise
       .then(() => {
-        return this.#executeAction(actionId, isStreaming);
+        this.#executeAction(actionId, isStreaming);
       })
       .catch((error) => {
         console.error('Action failed:', error);
@@ -127,12 +127,11 @@ export class ActionRunner {
 
         /*
          * adding a delay to avoid any race condition between 2 start actions
-         * i am up for a better approch
+         * i am up for a better approach
          */
        await new Promise((resolve) => setTimeout(resolve, 2000));
 
        return;
-        break;
      }
    }
 
app/lib/stores/workbench.ts CHANGED
@@ -13,7 +13,6 @@ import JSZip from 'jszip';
 import { saveAs } from 'file-saver';
 import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
 import * as nodePath from 'node:path';
-import type { WebContainerProcess } from '@webcontainer/api';
 import { extractRelativePath } from '~/utils/diff';
 
 export interface ArtifactState {
@@ -42,7 +41,6 @@ export class WorkbenchStore {
   unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
   modifiedFiles = new Set<string>();
   artifactIdList: string[] = [];
-  #boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined;
   #globalExecutionQueue = Promise.resolve();
   constructor() {
     if (import.meta.hot) {
@@ -439,6 +437,8 @@ export class WorkbenchStore {
         });
         return { path: extractRelativePath(filePath), sha: blob.sha };
       }
+
+      return null;
     }),
   );
 
app/utils/constants.ts CHANGED
@@ -192,7 +192,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
       { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
       { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: 'https://platform.deepseek.com/api_keys',
+    getApiKeyLink: 'https://platform.deepseek.com/apiKeys',
   },
   {
     name: 'Mistral',
@@ -242,8 +242,8 @@ const getOllamaBaseUrl = () => {
 
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = getOllamaBaseUrl();
-    const response = await fetch(`${base_url}/api/tags`);
+    const baseUrl = getOllamaBaseUrl();
+    const response = await fetch(`${baseUrl}/api/tags`);
     const data = (await response.json()) as OllamaApiResponse;
 
     return data.models.map((model: OllamaModel) => ({
@@ -252,6 +252,7 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
       provider: 'Ollama',
       maxTokenAllowed: 8000,
     }));
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
   } catch (e) {
     return [];
   }
@@ -259,16 +260,16 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
 
 async function getOpenAILikeModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
+    const baseUrl = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
 
-    if (!base_url) {
+    if (!baseUrl) {
       return [];
     }
 
-    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
-    const response = await fetch(`${base_url}/models`, {
+    const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
+    const response = await fetch(`${baseUrl}/models`, {
       headers: {
-        Authorization: `Bearer ${api_key}`,
+        Authorization: `Bearer ${apiKey}`,
       },
     });
     const res = (await response.json()) as any;
@@ -278,6 +279,7 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
       label: model.id,
       provider: 'OpenAILike',
     }));
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
   } catch (e) {
     return [];
   }
@@ -318,8 +320,8 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
 
 async function getLMStudioModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
-    const response = await fetch(`${base_url}/v1/models`);
+    const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    const response = await fetch(`${baseUrl}/v1/models`);
     const data = (await response.json()) as any;
 
     return data.data.map((model: any) => ({
@@ -327,6 +329,7 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
       label: model.id,
       provider: 'LMStudio',
     }));
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
  } catch (e) {
    return [];
  }
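The eslint-disable comments ahead of the catch (e) clauses silence the unused-binding warning while keeping the variable in place. An alternative that needs no comment is the optional catch binding available since ES2019; a small illustrative sketch (not what this commit does, and the endpoint below is assumed):

async function fetchModelIds(baseUrl: string): Promise<string[]> {
  try {
    const response = await fetch(`${baseUrl}/v1/models`);
    const data = (await response.json()) as { data: Array<{ id: string }> };

    return data.data.map((model) => model.id);
  } catch {
    // optional catch binding: no unused binding, so no eslint-disable needed
    return [];
  }
}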
app/utils/shell.ts CHANGED
@@ -52,6 +52,8 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
   return process;
 }
 
+export type ExecutionResult = { output: string; exitCode: number } | undefined;
+
 export class BoltShell {
   #initialized: (() => void) | undefined;
   #readyPromise: Promise<void>;
@@ -61,36 +63,39 @@ export class BoltShell {
   executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();
   #outputStream: ReadableStreamDefaultReader<string> | undefined;
   #shellInputStream: WritableStreamDefaultWriter<string> | undefined;
+
   constructor() {
     this.#readyPromise = new Promise((resolve) => {
       this.#initialized = resolve;
     });
   }
+
   ready() {
     return this.#readyPromise;
   }
+
   async init(webcontainer: WebContainer, terminal: ITerminal) {
     this.#webcontainer = webcontainer;
     this.#terminal = terminal;
 
-    const callback = (data: string) => {
-      console.log(data);
-    };
     const { process, output } = await this.newBoltShellProcess(webcontainer, terminal);
     this.#process = process;
     this.#outputStream = output.getReader();
     await this.waitTillOscCode('interactive');
     this.#initialized?.();
   }
+
   get terminal() {
     return this.#terminal;
   }
+
   get process() {
     return this.#process;
   }
-  async executeCommand(sessionId: string, command: string) {
+
+  async executeCommand(sessionId: string, command: string): Promise<ExecutionResult> {
     if (!this.process || !this.terminal) {
-      return;
+      return undefined;
     }
 
     const state = this.executionState.get();
@@ -109,14 +114,15 @@ export class BoltShell {
     this.terminal.input(command.trim() + '\n');
 
     //wait for the execution to finish
-    const executionPrms = this.getCurrentExecutionResult();
-    this.executionState.set({ sessionId, active: true, executionPrms });
+    const executionPromise = this.getCurrentExecutionResult();
+    this.executionState.set({ sessionId, active: true, executionPrms: executionPromise });
 
-    const resp = await executionPrms;
+    const resp = await executionPromise;
     this.executionState.set({ sessionId, active: false });
 
     return resp;
   }
+
   async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
     const args: string[] = [];
 
@@ -167,10 +173,12 @@
 
     return { process, output: internalOutput };
   }
-  async getCurrentExecutionResult() {
+
+  async getCurrentExecutionResult(): Promise<ExecutionResult> {
     const { output, exitCode } = await this.waitTillOscCode('exit');
     return { output, exitCode };
   }
+
   async waitTillOscCode(waitCode: string) {
     let fullOutput = '';
     let exitCode: number = 0;
@@ -192,7 +200,7 @@ export class BoltShell {
       fullOutput += text;
 
       // Check if command completion signal with exit code
-      const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
+      const [, osc, , , code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
 
      if (osc === 'exit') {
        exitCode = parseInt(code, 10);
@@ -206,6 +214,7 @@ export class BoltShell {
     return { output: fullOutput, exitCode };
   }
 }
+
 export function newBoltShellProcess() {
   return new BoltShell();
 }
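With executeCommand now typed as Promise<ExecutionResult>, callers have to handle the undefined branch (shell or terminal not yet initialized) before reading output or exitCode. A hedged usage sketch, assuming the shell has been set up elsewhere via init:

import { newBoltShellProcess } from '~/utils/shell';

const shell = newBoltShellProcess();

async function run(sessionId: string, command: string) {
  const result = await shell.executeCommand(sessionId, command);

  if (!result) {
    // undefined means the shell process or terminal was not available
    return;
  }

  console.log(`exit code ${result.exitCode}`, result.output);
}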
worker-configuration.d.ts CHANGED
@@ -9,4 +9,7 @@ interface Env {
   OPENAI_LIKE_API_BASE_URL: string;
   DEEPSEEK_API_KEY: string;
   LMSTUDIO_API_BASE_URL: string;
+  GOOGLE_GENERATIVE_AI_API_KEY: string;
+  MISTRAL_API_KEY: string;
+  XAI_API_KEY: string;
 }