yunat
committed on
Commit
·
0d77763
1
Parent(s):
4f7a06f
let the ollama models be auto generated from ollama api
Browse files- .env.example +5 -1
- app/entry.server.tsx +3 -0
- app/routes/api.models.ts +6 -0
- app/utils/constants.ts +51 -34
- app/utils/types.ts +28 -0
- worker-configuration.d.ts +1 -0
.env.example
CHANGED
@@ -20,5 +20,9 @@ ANTHROPIC_API_KEY=
|
|
20 |
# You only need this environment variable set if you want to use OpenRouter models
|
21 |
OPEN_ROUTER_API_KEY=
|
22 |
|
|
|
|
|
|
|
|
|
23 |
# Include this environment variable if you want more logging for debugging locally
|
24 |
-
VITE_LOG_LEVEL=debug
|
|
|
20 |
# You only need this environment variable set if you want to use OpenRouter models
|
21 |
OPEN_ROUTER_API_KEY=
|
22 |
|
23 |
+
# You only need this environment variable set if you want to use Ollama models
|
24 |
+
# Example base URL: http://localhost:11434
|
25 |
+
OLLAMA_API_BASE_URL=
|
26 |
+
|
27 |
# Include this environment variable if you want more logging for debugging locally
|
28 |
+
VITE_LOG_LEVEL=debug
|
app/entry.server.tsx
CHANGED
@@ -5,6 +5,7 @@ import { renderToReadableStream } from 'react-dom/server';
|
|
5 |
import { renderHeadToString } from 'remix-island';
|
6 |
import { Head } from './root';
|
7 |
import { themeStore } from '~/lib/stores/theme';
|
|
|
8 |
|
9 |
export default async function handleRequest(
|
10 |
request: Request,
|
@@ -13,6 +14,8 @@ export default async function handleRequest(
|
|
13 |
remixContext: EntryContext,
|
14 |
_loadContext: AppLoadContext,
|
15 |
) {
|
|
|
|
|
16 |
const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
|
17 |
signal: request.signal,
|
18 |
onError(error: unknown) {
|
|
|
5 |
import { renderHeadToString } from 'remix-island';
|
6 |
import { Head } from './root';
|
7 |
import { themeStore } from '~/lib/stores/theme';
|
8 |
+
import { initializeModelList } from '~/utils/constants';
|
9 |
|
10 |
export default async function handleRequest(
|
11 |
request: Request,
|
|
|
14 |
remixContext: EntryContext,
|
15 |
_loadContext: AppLoadContext,
|
16 |
) {
|
17 |
+
await initializeModelList();
|
18 |
+
|
19 |
const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
|
20 |
signal: request.signal,
|
21 |
onError(error: unknown) {
|
app/routes/api.models.ts
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { json } from '@remix-run/cloudflare';
|
2 |
+
import { MODEL_LIST } from '~/utils/constants';
|
3 |
+
|
4 |
+
export async function loader() {
|
5 |
+
return json(MODEL_LIST);
|
6 |
+
}
|
app/utils/constants.ts
CHANGED
@@ -1,38 +1,55 @@
|
|
|
|
|
|
1 |
export const WORK_DIR_NAME = 'project';
|
2 |
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
|
3 |
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
|
4 |
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
|
5 |
-
export const DEFAULT_MODEL =
|
6 |
-
export const DEFAULT_PROVIDER =
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import type { ModelInfo, OllamaApiResponse } from './types';
3 |
// Workspace layout used by the in-browser container.
export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;

// Tag name wrapping file-modification payloads in chat messages.
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';

// Extracts the "[Model: …]" prefix a message may carry to select a model.
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;

// Fallbacks when no model/provider is specified by the user or message.
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_PROVIDER = 'Anthropic';

// Hard-coded catalogue of hosted provider models. Ollama models are NOT
// listed here; they are discovered at runtime and merged into MODEL_LIST.
const staticModels: ModelInfo[] = [
  { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
  { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
  { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
  { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
  { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
  { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
  { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
  { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
  { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
  { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' },
  { name: 'claude-3-opus-20240229', label: 'Claude 3 Opus', provider: 'Anthropic' },
  { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
  { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' },
  { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
  { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
  { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
  { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
];

// Live model list served by /api/models. Starts as a copy of staticModels and
// is reassigned by initializeModelList() once Ollama models are fetched.
// NOTE(review): deliberately an exported *mutable* binding — importers observe
// updates through the ES-module live binding. Do not copy it into a local at
// import time or you will miss the refresh.
export let MODEL_LIST: ModelInfo[] = [...staticModels];
35 |
+
async function getOllamaModels(): Promise<ModelInfo[]> {
|
36 |
+
try {
|
37 |
+
const response = await fetch(`http://localhost:11434/api/tags`);
|
38 |
+
const data = await response.json();
|
39 |
+
|
40 |
+
return data.models.map((model: any) => ({
|
41 |
+
name: model.name,
|
42 |
+
label: `${model.name} (${model.details.parameter_size})`,
|
43 |
+
provider: 'Ollama',
|
44 |
+
}));
|
45 |
+
} catch (e) {
|
46 |
+
return [];
|
47 |
+
}
|
48 |
+
}
|
49 |
+
|
50 |
+
async function initializeModelList(): Promise<void> {
|
51 |
+
const ollamaModels = await getOllamaModels();
|
52 |
+
MODEL_LIST = [...ollamaModels, ...staticModels];
|
53 |
+
}
|
54 |
+
initializeModelList().then();
|
55 |
+
export { getOllamaModels, initializeModelList };
|
app/utils/types.ts
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
/**
 * Per-model metadata block inside the Ollama `/api/tags` response
 * (consumed by `getOllamaModels` in constants.ts).
 * Field names mirror the API's JSON keys — do not rename.
 */
interface OllamaModelDetails {
  parent_model: string;
  format: string;
  family: string;
  families: string[];
  // Human-readable parameter size; interpolated verbatim into UI labels.
  parameter_size: string;
  quantization_level: string;
}

/** One installed model as listed by the Ollama `/api/tags` endpoint. */
interface OllamaModel {
  name: string;
  model: string;
  // Timestamp string as returned by the API — assumed ISO-8601; TODO confirm.
  modified_at: string;
  size: number;
  digest: string;
  details: OllamaModelDetails;
}

/** Top-level payload of `GET {OLLAMA_API_BASE_URL}/api/tags`. */
export interface OllamaApiResponse {
  models: OllamaModel[];
}

/**
 * Provider-agnostic model descriptor used throughout the app
 * (static catalogue, Ollama discovery, and the /api/models route).
 */
export interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}
|
worker-configuration.d.ts
CHANGED
@@ -3,4 +3,5 @@ interface Env {
|
|
3 |
OPENAI_API_KEY: string;
|
4 |
GROQ_API_KEY: string;
|
5 |
OPEN_ROUTER_API_KEY: string;
|
|
|
6 |
}
|
|
|
3 |
OPENAI_API_KEY: string;
|
4 |
GROQ_API_KEY: string;
|
5 |
OPEN_ROUTER_API_KEY: string;
|
6 |
+
OLLAMA_API_BASE_URL: string;
|
7 |
}
|