Commit 73a07c9 · Parent: a544611
fix: Resolved

Files changed:
- app/lib/.server/llm/model.ts (+15 -1)
- app/utils/constants.ts (+24 -6)
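In brief: the commit pins Ollama models to a 32k context window, adds a getXAIModel factory for xAI's OpenAI-compatible API plus a matching 'xAI' case in getModel, registers Grok Beta in the static model list (directly and via OpenRouter, alongside updated Google Gemini 1.5 entries), and makes the Ollama base URL resolve correctly when the backend runs inside Docker.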
app/lib/.server/llm/model.ts

@@ -58,7 +58,10 @@ export function getGroqModel(apiKey: string, model: string) {
 }

 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model);
+  let Ollama = ollama(model, {
+    numCtx: 32768,
+  });
+
   Ollama.config.baseURL = `${baseURL}/api`;
   return Ollama;
 }

@@ -80,6 +83,15 @@ export function getOpenRouterModel(apiKey: string, model: string) {
   return openRouter.chat(model);
 }

+export function getXAIModel(apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL: 'https://api.x.ai/v1',
+    apiKey,
+  });
+
+  return openai(model);
+}
+
 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
   const apiKey = getAPIKey(env, provider, apiKeys);
   const baseURL = getBaseURL(env, provider);

@@ -101,6 +113,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
       return getDeepseekModel(apiKey, model)
     case 'Mistral':
       return getMistralModel(apiKey, model);
+    case 'xAI':
+      return getXAIModel(apiKey, model);
     default:
       return getOllamaModel(baseURL, model);
   }
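Two notes on the model.ts changes. First, `numCtx: 32768` raises the context window Ollama allocates per request; Ollama's server-side default is 2048 tokens, which a large system prompt can overflow (Ollama then silently truncates the context rather than erroring). Second, xAI serves an OpenAI-compatible API, so `getXAIModel` reuses `createOpenAI` with a custom `baseURL` instead of requiring a dedicated SDK. A minimal usage sketch of the new provider path, assuming the Vercel AI SDK's `generateText` and the app's `~/` import alias (neither is shown in this commit):

// Hypothetical usage sketch, not part of the commit: route a prompt
// through the new xAI provider. Assumes XAI_API_KEY is set.
import { generateText } from 'ai';
import { getXAIModel } from '~/lib/.server/llm/model';

const { text } = await generateText({
  model: getXAIModel(process.env.XAI_API_KEY ?? '', 'grok-beta'),
  prompt: 'Reply with one word: ping',
});
console.log(text);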
app/utils/constants.ts

@@ -4,8 +4,8 @@ export const WORK_DIR_NAME = 'project';
 export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
 export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
 export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
-export const DEFAULT_MODEL = '
-export const DEFAULT_PROVIDER = '
+export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
+export const DEFAULT_PROVIDER = 'Anthropic';

 const staticModels: ModelInfo[] = [
   { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },

@@ -13,8 +13,9 @@ const staticModels: ModelInfo[] = [
   { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' },
   { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
   { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'google/gemini-flash-1.5
-  { name: 'google/gemini-pro-1.5
+  { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
+  { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
+  { name: 'x-ai/grok-beta', label: "xAI Grok Beta (OpenRouter)", provider: 'OpenRouter' },
   { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
   { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
   { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },

@@ -32,6 +33,7 @@ const staticModels: ModelInfo[] = [
   { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
   { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
   { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
+  { name: 'grok-beta', label: "xAI Grok Beta", provider: 'xAI' },
   { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'},
   { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'},
   { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },

@@ -47,9 +49,25 @@ const staticModels: ModelInfo[] = [

 export let MODEL_LIST: ModelInfo[] = [...staticModels];

+const getOllamaBaseUrl = () => {
+  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+  // Check if we're in the browser
+  if (typeof window !== 'undefined') {
+    // Frontend always uses localhost
+    return defaultBaseUrl;
+  }
+
+  // Backend: Check if we're running in Docker
+  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+  return isDocker
+    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    : defaultBaseUrl;
+};
+
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url =
+    const base_url = getOllamaBaseUrl();
     const response = await fetch(`${base_url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;

@@ -92,4 +110,4 @@ async function initializeModelList(): Promise<void> {
   MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
 }
 initializeModelList().then();
-export { getOllamaModels, getOpenAILikeModels, initializeModelList };
+export { getOllamaModels, getOpenAILikeModels, initializeModelList };
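Why getOllamaBaseUrl branches the way it does: `localhost` is relative to wherever the code runs. In the browser it points at the user's machine, but inside a Docker container it points at the container itself, so a server-side fetch to `http://localhost:11434` would never reach an Ollama instance on the host. When `RUNNING_IN_DOCKER=true`, the helper substitutes Docker's host alias. A standalone sketch of that backend branch (it reads `process.env` throughout so it runs under plain Node; the committed code uses `import.meta.env` for the default URL, and how `RUNNING_IN_DOCKER` gets set is assumed rather than shown in this commit):

// Minimal reproduction of the backend branch of getOllamaBaseUrl.
// Assumption: RUNNING_IN_DOCKER is exported by the Docker image or a
// compose file; this commit does not show that wiring.
const defaultBaseUrl = process.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';

const baseUrl = isDocker
  ? defaultBaseUrl.replace('localhost', 'host.docker.internal')
  : defaultBaseUrl;

console.log(baseUrl); // http://host.docker.internal:11434 when RUNNING_IN_DOCKER=true

Note that on Linux, `host.docker.internal` usually has to be mapped explicitly, for example with `extra_hosts: ["host.docker.internal:host-gateway"]` in a compose file.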