import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
import type { ProviderInfo } from '~/types/model';

// Name of the project working directory, and its absolute path.
export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
// Tag name that wraps file-modification payloads embedded in chat messages.
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
// Extracts the "[Model: …]" header; anchored to the start of the message.
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
// Extracts the "[Provider: …]" header. NOTE(review): unanchored, unlike
// MODEL_REGEX — presumably because it follows the model tag; confirm intent.
export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
// Fallback model used when a message carries no "[Model: …]" header.
export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';

// Registry of every LLM provider the app can use. Each entry lists the models
// known at build time (staticModels) and, for local/aggregator providers, an
// optional fetcher (getDynamicModels) that discovers models at runtime.
// getApiKeyLink points the user at where to obtain credentials.
const PROVIDER_LIST: ProviderInfo[] = [
  {
    name: 'Anthropic',
    staticModels: [
      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
      { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
      { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' }
    ],
    getApiKeyLink: "https://console.anthropic.com/settings/keys",
  },
  {
    // Local runtime: no static models; discovered via the Ollama REST API.
    name: 'Ollama',
    staticModels: [],
    getDynamicModels: getOllamaModels,
    getApiKeyLink: "https://ollama.com/download",
    labelForGetApiKey: "Download Ollama",
    icon: "i-ph:cloud-arrow-down",
  }, {
    // Any OpenAI-compatible endpoint, configured via environment variables.
    name: 'OpenAILike',
    staticModels: [],
    getDynamicModels: getOpenAILikeModels
  },
  {
    name: 'Cohere',
    staticModels: [
      { name: 'command-r-plus-08-2024', label: 'Command R plus Latest', provider: 'Cohere' },
      { name: 'command-r-08-2024', label: 'Command R Latest', provider: 'Cohere' },
      { name: 'command-r-plus', label: 'Command R plus', provider: 'Cohere' },
      { name: 'command-r', label: 'Command R', provider: 'Cohere' },
      { name: 'command', label: 'Command', provider: 'Cohere' },
      { name: 'command-nightly', label: 'Command Nightly', provider: 'Cohere' },
      { name: 'command-light', label: 'Command Light', provider: 'Cohere' },
      { name: 'command-light-nightly', label: 'Command Light Nightly', provider: 'Cohere' },
      { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere' },
      { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere' },
    ],
    getApiKeyLink: 'https://dashboard.cohere.com/api-keys'
  },
  {
    name: 'OpenRouter',
    staticModels: [
      // NOTE(review): provider is 'OpenAI' inside the OpenRouter list — looks
      // inconsistent with every other entry here; confirm this is intentional.
      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
      {
        name: 'anthropic/claude-3.5-sonnet',
        label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
        provider: 'OpenRouter'
      },
      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
      { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter' },
      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' }
    ],
    // Static list above is a curated subset; the full catalogue is fetched live.
    getDynamicModels: getOpenRouterModels,
    getApiKeyLink: 'https://openrouter.ai/settings/keys',

  }, {
    name: 'Google',
    staticModels: [
      { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
      { name: 'gemini-1.5-flash-002', label: 'Gemini 1.5 Flash-002', provider: 'Google' },
      { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google' },
      { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
      { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google' },
      { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google' }
    ],
    getApiKeyLink: 'https://aistudio.google.com/app/apikey'
  }, {
    name: 'Groq',
    staticModels: [
      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
      { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
      { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
      { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }
    ],
    getApiKeyLink: 'https://console.groq.com/keys'
  },
  {
    name: 'HuggingFace',
    staticModels: [
      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace' },
      { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace' },
      { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace' },
      { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace' }
    ],
    getApiKeyLink: 'https://huggingface.co/settings/tokens'
  },
  
  {
    name: 'OpenAI',
    staticModels: [
      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
      { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
      { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' }
    ],
    getApiKeyLink: "https://platform.openai.com/api-keys",
  }, {
    name: 'xAI',
    staticModels: [
      { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI' }
    ],
    getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
  }, {
    name: 'Deepseek',
    staticModels: [
      { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' }
    ],
    getApiKeyLink: 'https://platform.deepseek.com/api_keys'
  }, {
    name: 'Mistral',
    staticModels: [
      { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
      { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
      { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
      { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
      { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
      { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
      { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
      { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' }
    ],
    getApiKeyLink: 'https://console.mistral.ai/api-keys/'
  }, {
    // Local runtime: models discovered via LM Studio's OpenAI-compatible API.
    name: 'LMStudio',
    staticModels: [],
    getDynamicModels: getLMStudioModels,
    getApiKeyLink: 'https://lmstudio.ai/',
    labelForGetApiKey: 'Get LMStudio',
    icon: "i-ph:cloud-arrow-down",
  }
];

// First provider in the registry is the default (Anthropic, per the list above).
export const DEFAULT_PROVIDER = PROVIDER_LIST[0];

// All build-time models across every provider, flattened into one list.
// (flatMap is the idiomatic single-step form of map(...).flat().)
const staticModels: ModelInfo[] = PROVIDER_LIST.flatMap(p => p.staticModels);

// Mutable model registry: starts as the static models and is replaced by
// initializeModelList() once dynamic providers have been queried.
export let MODEL_LIST: ModelInfo[] = [...staticModels];

// Resolve the base URL for talking to the Ollama API.
// Browser code uses the configured (or default localhost) URL unchanged;
// server code rewrites "localhost" when running inside Docker, where the
// host machine is reachable as "host.docker.internal" instead.
const getOllamaBaseUrl = () => {
  const configuredUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

  const isBrowser = typeof window !== 'undefined';
  if (isBrowser) {
    // Frontend: nothing to rewrite.
    return configuredUrl;
  }

  // Backend: a Docker container cannot reach the host via "localhost".
  if (process.env.RUNNING_IN_DOCKER === 'true') {
    return configuredUrl.replace('localhost', 'host.docker.internal');
  }

  return configuredUrl;
};

/**
 * Fetch the locally-installed Ollama models via the Ollama REST API.
 *
 * Returns an empty list on any failure (Ollama not running, network error,
 * non-2xx response) so the app still loads without the provider.
 */
async function getOllamaModels(): Promise<ModelInfo[]> {
  try {
    const base_url = getOllamaBaseUrl();
    const response = await fetch(`${base_url}/api/tags`);
    if (!response.ok) {
      // A non-2xx body would fail JSON parsing anyway; log status and bail.
      console.error(`Ollama /api/tags responded with status ${response.status}`);
      return [];
    }
    const data = await response.json() as OllamaApiResponse;

    return data.models.map((model: OllamaModel) => ({
      name: model.name,
      label: `${model.name} (${model.details.parameter_size})`,
      provider: 'Ollama'
    }));
  } catch (e) {
    // Best-effort: keep the degrade-to-empty contract, but leave a trace.
    console.error('Failed to fetch Ollama models:', e);
    return [];
  }
}

/**
 * Fetch models from an OpenAI-compatible endpoint configured via
 * OPENAI_LIKE_API_BASE_URL (and optionally OPENAI_LIKE_API_KEY).
 *
 * Returns an empty list when the endpoint is unconfigured, unreachable, or
 * responds with a non-2xx status, so the app still loads without it.
 */
async function getOpenAILikeModels(): Promise<ModelInfo[]> {
  try {
    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
    if (!base_url) {
      // Provider not configured — nothing to fetch.
      return [];
    }
    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
    const response = await fetch(`${base_url}/models`, {
      headers: {
        Authorization: `Bearer ${api_key}`
      }
    });
    if (!response.ok) {
      console.error(`OpenAILike /models responded with status ${response.status}`);
      return [];
    }
    // Minimal structural type instead of `any`: only `data[].id` is read.
    const res = await response.json() as { data: { id: string }[] };
    return res.data.map(model => ({
      name: model.id,
      label: model.id,
      provider: 'OpenAILike'
    }));
  } catch (e) {
    // Best-effort: keep the degrade-to-empty contract, but leave a trace.
    console.error('Failed to fetch OpenAILike models:', e);
    return [];
  }
}

type OpenRouterModelsResponse = {
  data: {
    name: string;
    id: string;
    context_length: number;
    // NOTE(review): confirm the live API serializes pricing as numbers; the
    // arithmetic below relies on numeric values (or JS coercion) either way.
    pricing: {
      prompt: number;
      completion: number;
    }
  }[]
};

/**
 * Fetch the public OpenRouter model catalogue, sorted by display name.
 * Labels embed per-million-token input/output pricing and context size.
 *
 * Returns an empty list on any failure, matching the other dynamic-model
 * fetchers — previously an OpenRouter outage rejected the whole
 * Promise.all in initializeModelList() and discarded every dynamic model.
 */
async function getOpenRouterModels(): Promise<ModelInfo[]> {
  try {
    const response = await fetch('https://openrouter.ai/api/v1/models', {
      headers: {
        'Content-Type': 'application/json'
      }
    });
    if (!response.ok) {
      console.error(`OpenRouter /models responded with status ${response.status}`);
      return [];
    }
    const data: OpenRouterModelsResponse = await response.json();

    return data.data.sort((a, b) => a.name.localeCompare(b.name)).map(m => ({
      name: m.id,
      label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
        2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(
        m.context_length / 1000)}k`,
      provider: 'OpenRouter'
    }));
  } catch (e) {
    console.error('Failed to fetch OpenRouter models:', e);
    return [];
  }
}

/**
 * Fetch models from a local LM Studio server via its OpenAI-compatible API.
 *
 * Returns an empty list on any failure (server not running, network error,
 * non-2xx response) so the app still loads without the provider.
 */
async function getLMStudioModels(): Promise<ModelInfo[]> {
  try {
    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
    const response = await fetch(`${base_url}/v1/models`);
    if (!response.ok) {
      console.error(`LMStudio /v1/models responded with status ${response.status}`);
      return [];
    }
    // Minimal structural type instead of `any`: only `data[].id` is read.
    const data = await response.json() as { data: { id: string }[] };
    return data.data.map(model => ({
      name: model.id,
      label: model.id,
      provider: 'LMStudio'
    }));
  } catch (e) {
    // Best-effort: keep the degrade-to-empty contract, but leave a trace.
    console.error('Failed to fetch LMStudio models:', e);
    return [];
  }
}



/**
 * Refresh MODEL_LIST by querying every provider that exposes dynamic models,
 * then appending the static models.
 *
 * Uses Promise.allSettled so that one provider's rejection is skipped rather
 * than failing the whole refresh (Promise.all would discard every dynamic
 * model if a single fetcher rejected).
 *
 * @returns the refreshed MODEL_LIST.
 */
async function initializeModelList(): Promise<ModelInfo[]> {
  const dynamicProviders = PROVIDER_LIST
    .filter((p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels);

  const results = await Promise.allSettled(dynamicProviders.map(p => p.getDynamicModels()));

  const dynamicModels = results
    .filter((r): r is PromiseFulfilledResult<ModelInfo[]> => r.status === 'fulfilled')
    .flatMap(r => r.value);

  MODEL_LIST = [...dynamicModels, ...staticModels];
  return MODEL_LIST;
}

export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, getOpenRouterModels, PROVIDER_LIST };