codacus committed on
Commit
5d4b860
·
1 Parent(s): b4d0597

updated to adapt baseUrl setup

Browse files
app/components/chat/BaseChat.tsx CHANGED
@@ -17,7 +17,6 @@ import Cookies from 'js-cookie';
17
  import * as Tooltip from '@radix-ui/react-tooltip';
18
 
19
  import styles from './BaseChat.module.scss';
20
- import type { ProviderInfo } from '~/utils/types';
21
  import { ExportChatButton } from '~/components/chat/chatExportAndImport/ExportChatButton';
22
  import { ImportButtons } from '~/components/chat/chatExportAndImport/ImportButtons';
23
  import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
@@ -26,6 +25,7 @@ import GitCloneButton from './GitCloneButton';
26
  import FilePreview from './FilePreview';
27
  import { ModelSelector } from '~/components/chat/ModelSelector';
28
  import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
 
29
 
30
  const TEXTAREA_MIN_HEIGHT = 76;
31
 
@@ -131,7 +131,26 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
131
  Cookies.remove('apiKeys');
132
  }
133
 
134
- initializeModelList().then((modelList) => {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
  setModelList(modelList);
136
  });
137
 
 
17
  import * as Tooltip from '@radix-ui/react-tooltip';
18
 
19
  import styles from './BaseChat.module.scss';
 
20
  import { ExportChatButton } from '~/components/chat/chatExportAndImport/ExportChatButton';
21
  import { ImportButtons } from '~/components/chat/chatExportAndImport/ImportButtons';
22
  import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
 
25
  import FilePreview from './FilePreview';
26
  import { ModelSelector } from '~/components/chat/ModelSelector';
27
  import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
28
+ import type { IProviderSetting, ProviderInfo } from '~/types/model';
29
 
30
  const TEXTAREA_MIN_HEIGHT = 76;
31
 
 
131
  Cookies.remove('apiKeys');
132
  }
133
 
134
+ let providerSettings: Record<string, IProviderSetting> | undefined = undefined;
135
+
136
+ try {
137
+ const savedProviderSettings = Cookies.get('providers');
138
+
139
+ if (savedProviderSettings) {
140
+ const parsedProviderSettings = JSON.parse(savedProviderSettings);
141
+
142
+ if (typeof parsedProviderSettings === 'object' && parsedProviderSettings !== null) {
143
+ providerSettings = parsedProviderSettings;
144
+ }
145
+ }
146
+ } catch (error) {
147
+ console.error('Error loading Provider Settings from cookies:', error);
148
+
149
+ // Clear invalid cookie data
150
+ Cookies.remove('providers');
151
+ }
152
+
153
+ initializeModelList(providerSettings).then((modelList) => {
154
  setModelList(modelList);
155
  });
156
 
app/components/chat/Chat.client.tsx CHANGED
@@ -17,9 +17,9 @@ import { cubicEasingFn } from '~/utils/easings';
17
  import { createScopedLogger, renderLogger } from '~/utils/logger';
18
  import { BaseChat } from './BaseChat';
19
  import Cookies from 'js-cookie';
20
- import type { ProviderInfo } from '~/utils/types';
21
  import { debounce } from '~/utils/debounce';
22
  import { useSettings } from '~/lib/hooks/useSettings';
 
23
 
24
  const toastAnimation = cssTransition({
25
  enter: 'animated fadeInRight',
 
17
  import { createScopedLogger, renderLogger } from '~/utils/logger';
18
  import { BaseChat } from './BaseChat';
19
  import Cookies from 'js-cookie';
 
20
  import { debounce } from '~/utils/debounce';
21
  import { useSettings } from '~/lib/hooks/useSettings';
22
+ import type { ProviderInfo } from '~/types/model';
23
 
24
  const toastAnimation = cssTransition({
25
  enter: 'animated fadeInRight',
app/components/settings/providers/ProvidersTab.tsx CHANGED
@@ -1,7 +1,8 @@
1
  import React, { useEffect, useState } from 'react';
2
  import { Switch } from '~/components/ui/Switch';
3
  import { useSettings } from '~/lib/hooks/useSettings';
4
- import { LOCAL_PROVIDERS, URL_CONFIGURABLE_PROVIDERS, type IProviderConfig } from '~/lib/stores/settings';
 
5
 
6
  export default function ProvidersTab() {
7
  const { providers, updateProviderSettings, isLocalModel } = useSettings();
 
1
  import React, { useEffect, useState } from 'react';
2
  import { Switch } from '~/components/ui/Switch';
3
  import { useSettings } from '~/lib/hooks/useSettings';
4
+ import { LOCAL_PROVIDERS, URL_CONFIGURABLE_PROVIDERS } from '~/lib/stores/settings';
5
+ import type { IProviderConfig } from '~/types/model';
6
 
7
  export default function ProvidersTab() {
8
  const { providers, updateProviderSettings, isLocalModel } = useSettings();
app/lib/.server/llm/model.ts CHANGED
@@ -11,6 +11,7 @@ import { createOpenRouter } from '@openrouter/ai-sdk-provider';
11
  import { createMistral } from '@ai-sdk/mistral';
12
  import { createCohere } from '@ai-sdk/cohere';
13
  import type { LanguageModelV1 } from 'ai';
 
14
 
15
  export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
16
 
@@ -127,14 +128,20 @@ export function getXAIModel(apiKey: OptionalApiKey, model: string) {
127
  return openai(model);
128
  }
129
 
130
- export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
 
 
 
 
 
 
131
  /*
132
  * let apiKey; // Declare first
133
  * let baseURL;
134
  */
135
 
136
  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
137
- const baseURL = getBaseURL(env, provider);
138
 
139
  switch (provider) {
140
  case 'Anthropic':
 
11
  import { createMistral } from '@ai-sdk/mistral';
12
  import { createCohere } from '@ai-sdk/cohere';
13
  import type { LanguageModelV1 } from 'ai';
14
+ import type { IProviderSetting } from '~/types/model';
15
 
16
  export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
17
 
 
128
  return openai(model);
129
  }
130
 
131
+ export function getModel(
132
+ provider: string,
133
+ model: string,
134
+ env: Env,
135
+ apiKeys?: Record<string, string>,
136
+ providerSettings?: Record<string, IProviderSetting>,
137
+ ) {
138
  /*
139
  * let apiKey; // Declare first
140
  * let baseURL;
141
  */
142
 
143
  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
144
+ const baseURL = providerSettings?.[provider].baseUrl || getBaseURL(env, provider);
145
 
146
  switch (provider) {
147
  case 'Anthropic':
app/lib/.server/llm/stream-text.ts CHANGED
@@ -3,6 +3,7 @@ import { getModel } from '~/lib/.server/llm/model';
3
  import { MAX_TOKENS } from './constants';
4
  import { getSystemPrompt } from './prompts';
5
  import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
 
6
 
7
  interface ToolResult<Name extends string, Args, Result> {
8
  toolCallId: string;
@@ -58,15 +59,17 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
58
  return { model, provider, content: cleanedContent };
59
  }
60
 
61
- export async function streamText(
62
- messages: Messages,
63
- env: Env,
64
- options?: StreamingOptions,
65
- apiKeys?: Record<string, string>,
66
- ) {
 
 
67
  let currentModel = DEFAULT_MODEL;
68
  let currentProvider = DEFAULT_PROVIDER.name;
69
- const MODEL_LIST = await getModelList(apiKeys || {});
70
  const processedMessages = messages.map((message) => {
71
  if (message.role === 'user') {
72
  const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -88,7 +91,7 @@ export async function streamText(
88
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
89
 
90
  return _streamText({
91
- model: getModel(currentProvider, currentModel, env, apiKeys) as any,
92
  system: getSystemPrompt(),
93
  maxTokens: dynamicMaxTokens,
94
  messages: convertToCoreMessages(processedMessages as any),
 
3
  import { MAX_TOKENS } from './constants';
4
  import { getSystemPrompt } from './prompts';
5
  import { DEFAULT_MODEL, DEFAULT_PROVIDER, getModelList, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
6
+ import type { IProviderSetting } from '~/types/model';
7
 
8
  interface ToolResult<Name extends string, Args, Result> {
9
  toolCallId: string;
 
59
  return { model, provider, content: cleanedContent };
60
  }
61
 
62
+ export async function streamText(props: {
63
+ messages: Messages;
64
+ env: Env;
65
+ options?: StreamingOptions;
66
+ apiKeys?: Record<string, string>;
67
+ providerSettings?: Record<string, IProviderSetting>;
68
+ }) {
69
+ const { messages, env, options, apiKeys, providerSettings } = props;
70
  let currentModel = DEFAULT_MODEL;
71
  let currentProvider = DEFAULT_PROVIDER.name;
72
+ const MODEL_LIST = await getModelList(apiKeys || {}, providerSettings);
73
  const processedMessages = messages.map((message) => {
74
  if (message.role === 'user') {
75
  const { model, provider, content } = extractPropertiesFromMessage(message);
 
91
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
92
 
93
  return _streamText({
94
+ model: getModel(currentProvider, currentModel, env, apiKeys, providerSettings) as any,
95
  system: getSystemPrompt(),
96
  maxTokens: dynamicMaxTokens,
97
  messages: convertToCoreMessages(processedMessages as any),
app/lib/hooks/useSettings.tsx CHANGED
@@ -1,14 +1,8 @@
1
  import { useStore } from '@nanostores/react';
2
- import {
3
- isDebugMode,
4
- isLocalModelsEnabled,
5
- LOCAL_PROVIDERS,
6
- providersStore,
7
- type IProviderSetting,
8
- } from '~/lib/stores/settings';
9
  import { useCallback, useEffect, useState } from 'react';
10
  import Cookies from 'js-cookie';
11
- import type { ProviderInfo } from '~/utils/types';
12
 
13
  export function useSettings() {
14
  const providers = useStore(providersStore);
 
1
  import { useStore } from '@nanostores/react';
2
+ import { isDebugMode, isLocalModelsEnabled, LOCAL_PROVIDERS, providersStore } from '~/lib/stores/settings';
 
 
 
 
 
 
3
  import { useCallback, useEffect, useState } from 'react';
4
  import Cookies from 'js-cookie';
5
+ import type { IProviderSetting, ProviderInfo } from '~/types/model';
6
 
7
  export function useSettings() {
8
  const providers = useStore(providersStore);
app/lib/stores/settings.ts CHANGED
@@ -1,7 +1,7 @@
1
  import { atom, map } from 'nanostores';
2
  import { workbenchStore } from './workbench';
3
- import type { ProviderInfo } from '~/utils/types';
4
  import { PROVIDER_LIST } from '~/utils/constants';
 
5
 
6
  export interface Shortcut {
7
  key: string;
@@ -17,14 +17,6 @@ export interface Shortcuts {
17
  toggleTerminal: Shortcut;
18
  }
19
 
20
- export interface IProviderSetting {
21
- enabled?: boolean;
22
- baseUrl?: string;
23
- }
24
- export type IProviderConfig = ProviderInfo & {
25
- settings: IProviderSetting;
26
- };
27
-
28
  export const URL_CONFIGURABLE_PROVIDERS = ['Ollama', 'LMStudio', 'OpenAILike'];
29
  export const LOCAL_PROVIDERS = ['OpenAILike', 'LMStudio', 'Ollama'];
30
 
 
1
  import { atom, map } from 'nanostores';
2
  import { workbenchStore } from './workbench';
 
3
  import { PROVIDER_LIST } from '~/utils/constants';
4
+ import type { IProviderConfig } from '~/types/model';
5
 
6
  export interface Shortcut {
7
  key: string;
 
17
  toggleTerminal: Shortcut;
18
  }
19
 
 
 
 
 
 
 
 
 
20
  export const URL_CONFIGURABLE_PROVIDERS = ['Ollama', 'LMStudio', 'OpenAILike'];
21
  export const LOCAL_PROVIDERS = ['OpenAILike', 'LMStudio', 'Ollama'];
22
 
app/routes/api.chat.ts CHANGED
@@ -3,6 +3,7 @@ import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
3
  import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
4
  import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
5
  import SwitchableStream from '~/lib/.server/llm/switchable-stream';
 
6
 
7
  export async function action(args: ActionFunctionArgs) {
8
  return chatAction(args);
@@ -38,6 +39,9 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
38
 
39
  // Parse the cookie's value (returns an object or null if no cookie exists)
40
  const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
 
 
 
41
 
42
  const stream = new SwitchableStream();
43
 
@@ -60,13 +64,13 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
60
  messages.push({ role: 'assistant', content });
61
  messages.push({ role: 'user', content: CONTINUE_PROMPT });
62
 
63
- const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
64
 
65
  return stream.switchSource(result.toAIStream());
66
  },
67
  };
68
 
69
- const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
70
 
71
  stream.switchSource(result.toAIStream());
72
 
 
3
  import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
4
  import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
5
  import SwitchableStream from '~/lib/.server/llm/switchable-stream';
6
+ import type { IProviderSetting } from '~/types/model';
7
 
8
  export async function action(args: ActionFunctionArgs) {
9
  return chatAction(args);
 
39
 
40
  // Parse the cookie's value (returns an object or null if no cookie exists)
41
  const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
42
+ const providerSettings: Record<string, IProviderSetting> = JSON.parse(
43
+ parseCookies(cookieHeader || '').providers || '{}',
44
+ );
45
 
46
  const stream = new SwitchableStream();
47
 
 
64
  messages.push({ role: 'assistant', content });
65
  messages.push({ role: 'user', content: CONTINUE_PROMPT });
66
 
67
+ const result = await streamText({ messages, env: context.cloudflare.env, options, apiKeys, providerSettings });
68
 
69
  return stream.switchSource(result.toAIStream());
70
  },
71
  };
72
 
73
+ const result = await streamText({ messages, env: context.cloudflare.env, options, apiKeys, providerSettings });
74
 
75
  stream.switchSource(result.toAIStream());
76
 
app/routes/api.enhancer.ts CHANGED
@@ -2,7 +2,7 @@ import { type ActionFunctionArgs } from '@remix-run/cloudflare';
2
  import { StreamingTextResponse, parseStreamPart } from 'ai';
3
  import { streamText } from '~/lib/.server/llm/stream-text';
4
  import { stripIndents } from '~/utils/stripIndent';
5
- import type { ProviderInfo } from '~/types/model';
6
 
7
  const encoder = new TextEncoder();
8
  const decoder = new TextDecoder();
@@ -11,8 +11,28 @@ export async function action(args: ActionFunctionArgs) {
11
  return enhancerAction(args);
12
  }
13
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  async function enhancerAction({ context, request }: ActionFunctionArgs) {
15
- const { message, model, provider, apiKeys } = await request.json<{
16
  message: string;
17
  model: string;
18
  provider: ProviderInfo;
@@ -36,9 +56,17 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
36
  });
37
  }
38
 
 
 
 
 
 
 
 
 
39
  try {
40
- const result = await streamText(
41
- [
42
  {
43
  role: 'user',
44
  content:
@@ -73,10 +101,10 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
73
  `,
74
  },
75
  ],
76
- context.cloudflare.env,
77
- undefined,
78
  apiKeys,
79
- );
 
80
 
81
  const transformStream = new TransformStream({
82
  transform(chunk, controller) {
 
2
  import { StreamingTextResponse, parseStreamPart } from 'ai';
3
  import { streamText } from '~/lib/.server/llm/stream-text';
4
  import { stripIndents } from '~/utils/stripIndent';
5
+ import type { IProviderSetting, ProviderInfo } from '~/types/model';
6
 
7
  const encoder = new TextEncoder();
8
  const decoder = new TextDecoder();
 
11
  return enhancerAction(args);
12
  }
13
 
14
+ function parseCookies(cookieHeader: string) {
15
+ const cookies: any = {};
16
+
17
+ // Split the cookie string by semicolons and spaces
18
+ const items = cookieHeader.split(';').map((cookie) => cookie.trim());
19
+
20
+ items.forEach((item) => {
21
+ const [name, ...rest] = item.split('=');
22
+
23
+ if (name && rest) {
24
+ // Decode the name and value, and join value parts in case it contains '='
25
+ const decodedName = decodeURIComponent(name.trim());
26
+ const decodedValue = decodeURIComponent(rest.join('=').trim());
27
+ cookies[decodedName] = decodedValue;
28
+ }
29
+ });
30
+
31
+ return cookies;
32
+ }
33
+
34
  async function enhancerAction({ context, request }: ActionFunctionArgs) {
35
+ const { message, model, provider } = await request.json<{
36
  message: string;
37
  model: string;
38
  provider: ProviderInfo;
 
56
  });
57
  }
58
 
59
+ const cookieHeader = request.headers.get('Cookie');
60
+
61
+ // Parse the cookie's value (returns an object or null if no cookie exists)
62
+ const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
63
+ const providerSettings: Record<string, IProviderSetting> = JSON.parse(
64
+ parseCookies(cookieHeader || '').providers || '{}',
65
+ );
66
+
67
  try {
68
+ const result = await streamText({
69
+ messages: [
70
  {
71
  role: 'user',
72
  content:
 
101
  `,
102
  },
103
  ],
104
+ env: context.cloudflare.env,
 
105
  apiKeys,
106
+ providerSettings,
107
+ });
108
 
109
  const transformStream = new TransformStream({
110
  transform(chunk, controller) {
app/types/model.ts CHANGED
@@ -3,9 +3,17 @@ import type { ModelInfo } from '~/utils/types';
3
  export type ProviderInfo = {
4
  staticModels: ModelInfo[];
5
  name: string;
6
- getDynamicModels?: (apiKeys?: Record<string, string>) => Promise<ModelInfo[]>;
7
  getApiKeyLink?: string;
8
  labelForGetApiKey?: string;
9
  icon?: string;
10
- isEnabled?: boolean;
 
 
 
 
 
 
 
 
11
  };
 
3
  export type ProviderInfo = {
4
  staticModels: ModelInfo[];
5
  name: string;
6
+ getDynamicModels?: (apiKeys?: Record<string, string>, providerSettings?: IProviderSetting) => Promise<ModelInfo[]>;
7
  getApiKeyLink?: string;
8
  labelForGetApiKey?: string;
9
  icon?: string;
10
+ };
11
+
12
+ export interface IProviderSetting {
13
+ enabled?: boolean;
14
+ baseUrl?: string;
15
+ }
16
+
17
+ export type IProviderConfig = ProviderInfo & {
18
+ settings: IProviderSetting;
19
  };
app/utils/constants.ts CHANGED
@@ -1,6 +1,6 @@
1
  import Cookies from 'js-cookie';
2
  import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
3
- import type { ProviderInfo } from '~/types/model';
4
 
5
  export const WORK_DIR_NAME = 'project';
6
  export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
@@ -295,13 +295,16 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(
295
 
296
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
297
 
298
- export async function getModelList(apiKeys: Record<string, string>) {
 
 
 
299
  MODEL_LIST = [
300
  ...(
301
  await Promise.all(
302
  PROVIDER_LIST.filter(
303
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
304
- ).map((p) => p.getDynamicModels(apiKeys)),
305
  )
306
  ).flat(),
307
  ...staticModels,
@@ -309,9 +312,9 @@ export async function getModelList(apiKeys: Record<string, string>) {
309
  return MODEL_LIST;
310
  }
311
 
312
- async function getTogetherModels(apiKeys?: Record<string, string>): Promise<ModelInfo[]> {
313
  try {
314
- const baseUrl = import.meta.env.TOGETHER_API_BASE_URL || '';
315
  const provider = 'Together';
316
 
317
  if (!baseUrl) {
@@ -350,8 +353,8 @@ async function getTogetherModels(apiKeys?: Record<string, string>): Promise<Mode
350
  }
351
  }
352
 
353
- const getOllamaBaseUrl = () => {
354
- const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
355
 
356
  // Check if we're in the browser
357
  if (typeof window !== 'undefined') {
@@ -365,7 +368,7 @@ const getOllamaBaseUrl = () => {
365
  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
366
  };
367
 
368
- async function getOllamaModels(): Promise<ModelInfo[]> {
369
  /*
370
  * if (typeof window === 'undefined') {
371
  * return [];
@@ -373,7 +376,7 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
373
  */
374
 
375
  try {
376
- const baseUrl = getOllamaBaseUrl();
377
  const response = await fetch(`${baseUrl}/api/tags`);
378
  const data = (await response.json()) as OllamaApiResponse;
379
 
@@ -389,20 +392,21 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
389
  }
390
  }
391
 
392
- async function getOpenAILikeModels(): Promise<ModelInfo[]> {
 
 
 
393
  try {
394
- const baseUrl = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
395
 
396
  if (!baseUrl) {
397
  return [];
398
  }
399
 
400
- let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
401
-
402
- const apikeys = JSON.parse(Cookies.get('apiKeys') || '{}');
403
 
404
- if (apikeys && apikeys.OpenAILike) {
405
- apiKey = apikeys.OpenAILike;
406
  }
407
 
408
  const response = await fetch(`${baseUrl}/models`, {
@@ -456,13 +460,13 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
456
  }));
457
  }
458
 
459
- async function getLMStudioModels(): Promise<ModelInfo[]> {
460
  if (typeof window === 'undefined') {
461
  return [];
462
  }
463
 
464
  try {
465
- const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
466
  const response = await fetch(`${baseUrl}/v1/models`);
467
  const data = (await response.json()) as any;
468
 
@@ -477,7 +481,7 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
477
  }
478
  }
479
 
480
- async function initializeModelList(): Promise<ModelInfo[]> {
481
  let apiKeys: Record<string, string> = {};
482
 
483
  try {
@@ -498,7 +502,7 @@ async function initializeModelList(): Promise<ModelInfo[]> {
498
  await Promise.all(
499
  PROVIDER_LIST.filter(
500
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
501
- ).map((p) => p.getDynamicModels(apiKeys)),
502
  )
503
  ).flat(),
504
  ...staticModels,
 
1
  import Cookies from 'js-cookie';
2
  import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
3
+ import type { ProviderInfo, IProviderSetting } from '~/types/model';
4
 
5
  export const WORK_DIR_NAME = 'project';
6
  export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
 
295
 
296
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
297
 
298
+ export async function getModelList(
299
+ apiKeys: Record<string, string>,
300
+ providerSettings?: Record<string, IProviderSetting>,
301
+ ) {
302
  MODEL_LIST = [
303
  ...(
304
  await Promise.all(
305
  PROVIDER_LIST.filter(
306
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
307
+ ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
308
  )
309
  ).flat(),
310
  ...staticModels,
 
312
  return MODEL_LIST;
313
  }
314
 
315
+ async function getTogetherModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
316
  try {
317
+ const baseUrl = settings?.baseUrl || import.meta.env.TOGETHER_API_BASE_URL || '';
318
  const provider = 'Together';
319
 
320
  if (!baseUrl) {
 
353
  }
354
  }
355
 
356
+ const getOllamaBaseUrl = (settings?: IProviderSetting) => {
357
+ const defaultBaseUrl = settings?.baseUrl || import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
358
 
359
  // Check if we're in the browser
360
  if (typeof window !== 'undefined') {
 
368
  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
369
  };
370
 
371
+ async function getOllamaModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
372
  /*
373
  * if (typeof window === 'undefined') {
374
  * return [];
 
376
  */
377
 
378
  try {
379
+ const baseUrl = getOllamaBaseUrl(settings);
380
  const response = await fetch(`${baseUrl}/api/tags`);
381
  const data = (await response.json()) as OllamaApiResponse;
382
 
 
392
  }
393
  }
394
 
395
+ async function getOpenAILikeModels(
396
+ apiKeys?: Record<string, string>,
397
+ settings?: IProviderSetting,
398
+ ): Promise<ModelInfo[]> {
399
  try {
400
+ const baseUrl = settings?.baseUrl || import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
401
 
402
  if (!baseUrl) {
403
  return [];
404
  }
405
 
406
+ let apiKey = '';
 
 
407
 
408
+ if (apiKeys && apiKeys.OpenAILike) {
409
+ apiKey = apiKeys.OpenAILike;
410
  }
411
 
412
  const response = await fetch(`${baseUrl}/models`, {
 
460
  }));
461
  }
462
 
463
+ async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
464
  if (typeof window === 'undefined') {
465
  return [];
466
  }
467
 
468
  try {
469
+ const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
470
  const response = await fetch(`${baseUrl}/v1/models`);
471
  const data = (await response.json()) as any;
472
 
 
481
  }
482
  }
483
 
484
+ async function initializeModelList(providerSettings?: Record<string, IProviderSetting>): Promise<ModelInfo[]> {
485
  let apiKeys: Record<string, string> = {};
486
 
487
  try {
 
502
  await Promise.all(
503
  PROVIDER_LIST.filter(
504
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
505
+ ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
506
  )
507
  ).flat(),
508
  ...staticModels,
app/utils/types.ts CHANGED
@@ -26,12 +26,3 @@ export interface ModelInfo {
26
  provider: string;
27
  maxTokenAllowed: number;
28
  }
29
-
30
- export interface ProviderInfo {
31
- staticModels: ModelInfo[];
32
- name: string;
33
- getDynamicModels?: () => Promise<ModelInfo[]>;
34
- getApiKeyLink?: string;
35
- labelForGetApiKey?: string;
36
- icon?: string;
37
- }
 
26
  provider: string;
27
  maxTokenAllowed: number;
28
  }