updated ollama to use defined base URL for model calls
Files changed:
- .gitignore +1 -1
- app/lib/.server/llm/api-key.ts +2 -0
- app/lib/.server/llm/model.ts +5 -3
- app/utils/constants.ts +1 -1
.gitignore CHANGED

@@ -12,7 +12,7 @@ dist-ssr
 *.local
 
 .vscode/*
-
+.vscode/launch.json
 !.vscode/extensions.json
 .idea
 .DS_Store
app/lib/.server/llm/api-key.ts CHANGED

@@ -30,6 +30,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
   switch (provider) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
+    case 'Ollama':
+      return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL;
     default:
       return "";
   }
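
Note (not part of the commit): the new 'Ollama' case follows the same lookup order as the existing OpenAILike case — the local environment first, then the Cloudflare binding. A minimal standalone sketch of that order, with a hypothetical Env type standing in for the project's real binding type:

// Sketch only: prefer the local env var, then the Workers binding, then fall back to "".
type Env = { OLLAMA_API_BASE_URL?: string };

function resolveOllamaBaseURL(cloudflareEnv: Env): string {
  return process.env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "";
}

// e.g. with OLLAMA_API_BASE_URL=http://localhost:11434 set locally:
console.log(resolveOllamaBaseURL({ OLLAMA_API_BASE_URL: "http://localhost:11434" }));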
app/lib/.server/llm/model.ts CHANGED

@@ -47,8 +47,10 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
-export function getOllamaModel(model: string) {
-  return ollama(model);
+export function getOllamaModel(baseURL: string, model: string) {
+  let Ollama = ollama(model);
+  Ollama.config.baseURL = `${baseURL}/api`;
+  return Ollama;
 }
 
 export function getOpenRouterModel(apiKey: string, model: string) {
@@ -77,6 +79,6 @@ export function getModel(provider: string, model: string, env: Env) {
     case 'OpenAILike':
       return getOpenAILikeModel(baseURL,apiKey, model);
     default:
-      return getOllamaModel(model);
+      return getOllamaModel(baseURL, model);
   }
 }
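
Note (a sketch, not what the commit does): getOllamaModel now mutates config.baseURL on the instance returned by ollama(model) and appends /api. If ollama(...) here comes from the ollama-ai-provider package, an alternative is to build a provider bound to the base URL up front instead of patching the default instance; the function name below is illustrative.

// Assumes ollama-ai-provider; keeps the same /api suffix the commit appends.
import { createOllama } from 'ollama-ai-provider';

export function getOllamaModelFromProvider(baseURL: string, model: string) {
  const provider = createOllama({ baseURL: `${baseURL}/api` });
  return provider(model);
}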
app/utils/constants.ts CHANGED

@@ -38,7 +38,7 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
     const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
     const url = new URL(base_url).toString();
-    const response = await fetch(`${url}
+    const response = await fetch(`${url}api/tags`);
     const data = await response.json();
 
     return data.models.map((model: any) => ({
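
For context on the slash handling above: new URL(base_url).toString() serializes an origin-only URL with a trailing slash, so appending api/tags without a leading slash yields a single-slash path to the Ollama tags endpoint. A quick TypeScript check:

// URL#toString() adds the trailing slash for an origin-only URL.
const url = new URL("http://localhost:11434").toString();
console.log(url);               // "http://localhost:11434/"
console.log(`${url}api/tags`);  // "http://localhost:11434/api/tags"
console.log(`${url}/api/tags`); // "http://localhost:11434//api/tags" (double slash)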