codacus committed on
Commit
62ebfe5
·
1 Parent(s): fce8999

fix: .env file baseUrl Issue

Browse files
app/commit.json CHANGED
@@ -1 +1 @@
1
- { "commit": "eb6d4353565be31c6e20bfca2c5aea29e4f45b6d", "version": "0.0.3" }
 
1
+ { "commit": "fce8999f27c0affbc762dc90de992b5a759ab325" }
app/components/chat/BaseChat.tsx CHANGED
@@ -119,6 +119,9 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
119
 
120
  useEffect(() => {
121
  // Load API keys from cookies on component mount
 
 
 
122
  try {
123
  const storedApiKeys = Cookies.get('apiKeys');
124
 
@@ -127,6 +130,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
127
 
128
  if (typeof parsedKeys === 'object' && parsedKeys !== null) {
129
  setApiKeys(parsedKeys);
 
130
  }
131
  }
132
  } catch (error) {
@@ -155,7 +159,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
155
  Cookies.remove('providers');
156
  }
157
 
158
- initializeModelList(providerSettings).then((modelList) => {
159
  setModelList(modelList);
160
  });
161
 
 
119
 
120
  useEffect(() => {
121
  // Load API keys from cookies on component mount
122
+
123
+ let parsedApiKeys: Record<string, string> | undefined = {};
124
+
125
  try {
126
  const storedApiKeys = Cookies.get('apiKeys');
127
 
 
130
 
131
  if (typeof parsedKeys === 'object' && parsedKeys !== null) {
132
  setApiKeys(parsedKeys);
133
+ parsedApiKeys = parsedKeys;
134
  }
135
  }
136
  } catch (error) {
 
159
  Cookies.remove('providers');
160
  }
161
 
162
+ initializeModelList({ apiKeys: parsedApiKeys, providerSettings }).then((modelList) => {
163
  setModelList(modelList);
164
  });
165
 
app/components/settings/providers/ProvidersTab.tsx CHANGED
@@ -87,7 +87,12 @@ export default function ProvidersTab() {
87
  type="text"
88
  value={provider.settings.baseUrl || ''}
89
  onChange={(e) => {
90
- const newBaseUrl = e.target.value;
 
 
 
 
 
91
  updateProviderSettings(provider.name, { ...provider.settings, baseUrl: newBaseUrl });
92
  logStore.logProvider(`Base URL updated for ${provider.name}`, {
93
  provider: provider.name,
 
87
  type="text"
88
  value={provider.settings.baseUrl || ''}
89
  onChange={(e) => {
90
+ let newBaseUrl: string | undefined = e.target.value;
91
+
92
+ if (newBaseUrl && newBaseUrl.trim().length === 0) {
93
+ newBaseUrl = undefined;
94
+ }
95
+
96
  updateProviderSettings(provider.name, { ...provider.settings, baseUrl: newBaseUrl });
97
  logStore.logProvider(`Base URL updated for ${provider.name}`, {
98
  provider: provider.name,
app/entry.server.tsx CHANGED
@@ -14,7 +14,7 @@ export default async function handleRequest(
14
  remixContext: EntryContext,
15
  _loadContext: AppLoadContext,
16
  ) {
17
- await initializeModelList();
18
 
19
  const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
20
  signal: request.signal,
 
14
  remixContext: EntryContext,
15
  _loadContext: AppLoadContext,
16
  ) {
17
+ await initializeModelList({});
18
 
19
  const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
20
  signal: request.signal,
app/lib/.server/llm/api-key.ts CHANGED
@@ -3,6 +3,7 @@
3
  * Preventing TS checks with files presented in the video for a better presentation.
4
  */
5
  import { env } from 'node:process';
 
6
 
7
  export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
8
  /**
@@ -50,16 +51,30 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
50
  }
51
  }
52
 
53
- export function getBaseURL(cloudflareEnv: Env, provider: string) {
 
 
 
 
 
 
54
  switch (provider) {
55
  case 'Together':
56
- return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL || 'https://api.together.xyz/v1';
 
 
 
 
 
57
  case 'OpenAILike':
58
- return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
59
  case 'LMStudio':
60
- return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
 
 
61
  case 'Ollama': {
62
- let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
 
63
 
64
  if (env.RUNNING_IN_DOCKER === 'true') {
65
  baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
 
3
  * Preventing TS checks with files presented in the video for a better presentation.
4
  */
5
  import { env } from 'node:process';
6
+ import type { IProviderSetting } from '~/types/model';
7
 
8
  export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
9
  /**
 
51
  }
52
  }
53
 
54
+ export function getBaseURL(cloudflareEnv: Env, provider: string, providerSettings?: Record<string, IProviderSetting>) {
55
+ let settingBaseUrl = providerSettings?.[provider].baseUrl;
56
+
57
+ if (settingBaseUrl && settingBaseUrl.length == 0) {
58
+ settingBaseUrl = undefined;
59
+ }
60
+
61
  switch (provider) {
62
  case 'Together':
63
+ return (
64
+ settingBaseUrl ||
65
+ env.TOGETHER_API_BASE_URL ||
66
+ cloudflareEnv.TOGETHER_API_BASE_URL ||
67
+ 'https://api.together.xyz/v1'
68
+ );
69
  case 'OpenAILike':
70
+ return settingBaseUrl || env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
71
  case 'LMStudio':
72
+ return (
73
+ settingBaseUrl || env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234'
74
+ );
75
  case 'Ollama': {
76
+ let baseUrl =
77
+ settingBaseUrl || env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
78
 
79
  if (env.RUNNING_IN_DOCKER === 'true') {
80
  baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
app/lib/.server/llm/model.ts CHANGED
@@ -84,6 +84,8 @@ export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
84
  }
85
 
86
  export function getOllamaModel(baseURL: string, model: string) {
 
 
87
  const ollamaInstance = ollama(model, {
88
  numCtx: DEFAULT_NUM_CTX,
89
  }) as LanguageModelV1 & { config: any };
@@ -140,7 +142,7 @@ export function getPerplexityModel(apiKey: OptionalApiKey, model: string) {
140
  export function getModel(
141
  provider: string,
142
  model: string,
143
- env: Env,
144
  apiKeys?: Record<string, string>,
145
  providerSettings?: Record<string, IProviderSetting>,
146
  ) {
@@ -148,9 +150,12 @@ export function getModel(
148
  * let apiKey; // Declare first
149
  * let baseURL;
150
  */
 
 
 
 
151
 
152
- const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
153
- const baseURL = providerSettings?.[provider].baseUrl || getBaseURL(env, provider);
154
 
155
  switch (provider) {
156
  case 'Anthropic':
 
84
  }
85
 
86
  export function getOllamaModel(baseURL: string, model: string) {
87
+ console.log({ baseURL, model });
88
+
89
  const ollamaInstance = ollama(model, {
90
  numCtx: DEFAULT_NUM_CTX,
91
  }) as LanguageModelV1 & { config: any };
 
142
  export function getModel(
143
  provider: string,
144
  model: string,
145
+ serverEnv: Env,
146
  apiKeys?: Record<string, string>,
147
  providerSettings?: Record<string, IProviderSetting>,
148
  ) {
 
150
  * let apiKey; // Declare first
151
  * let baseURL;
152
  */
153
+ // console.log({provider,model});
154
+
155
+ const apiKey = getAPIKey(serverEnv, provider, apiKeys); // Then assign
156
+ const baseURL = getBaseURL(serverEnv, provider, providerSettings);
157
 
158
+ // console.log({apiKey,baseURL});
 
159
 
160
  switch (provider) {
161
  case 'Anthropic':
app/lib/.server/llm/stream-text.ts CHANGED
@@ -151,10 +151,13 @@ export async function streamText(props: {
151
  providerSettings?: Record<string, IProviderSetting>;
152
  promptId?: string;
153
  }) {
154
- const { messages, env, options, apiKeys, files, providerSettings, promptId } = props;
 
 
 
155
  let currentModel = DEFAULT_MODEL;
156
  let currentProvider = DEFAULT_PROVIDER.name;
157
- const MODEL_LIST = await getModelList(apiKeys || {}, providerSettings);
158
  const processedMessages = messages.map((message) => {
159
  if (message.role === 'user') {
160
  const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -196,7 +199,7 @@ export async function streamText(props: {
196
  }
197
 
198
  return _streamText({
199
- model: getModel(currentProvider, currentModel, env, apiKeys, providerSettings) as any,
200
  system: systemPrompt,
201
  maxTokens: dynamicMaxTokens,
202
  messages: convertToCoreMessages(processedMessages as any),
 
151
  providerSettings?: Record<string, IProviderSetting>;
152
  promptId?: string;
153
  }) {
154
+ const { messages, env: serverEnv, options, apiKeys, files, providerSettings, promptId } = props;
155
+
156
+ // console.log({serverEnv});
157
+
158
  let currentModel = DEFAULT_MODEL;
159
  let currentProvider = DEFAULT_PROVIDER.name;
160
+ const MODEL_LIST = await getModelList({ apiKeys, providerSettings, serverEnv: serverEnv as any });
161
  const processedMessages = messages.map((message) => {
162
  if (message.role === 'user') {
163
  const { model, provider, content } = extractPropertiesFromMessage(message);
 
199
  }
200
 
201
  return _streamText({
202
+ model: getModel(currentProvider, currentModel, serverEnv, apiKeys, providerSettings) as any,
203
  system: systemPrompt,
204
  maxTokens: dynamicMaxTokens,
205
  messages: convertToCoreMessages(processedMessages as any),
app/types/model.ts CHANGED
@@ -3,7 +3,11 @@ import type { ModelInfo } from '~/utils/types';
3
  export type ProviderInfo = {
4
  staticModels: ModelInfo[];
5
  name: string;
6
- getDynamicModels?: (apiKeys?: Record<string, string>, providerSettings?: IProviderSetting) => Promise<ModelInfo[]>;
 
 
 
 
7
  getApiKeyLink?: string;
8
  labelForGetApiKey?: string;
9
  icon?: string;
 
3
  export type ProviderInfo = {
4
  staticModels: ModelInfo[];
5
  name: string;
6
+ getDynamicModels?: (
7
+ apiKeys?: Record<string, string>,
8
+ providerSettings?: IProviderSetting,
9
+ serverEnv?: Record<string, string>,
10
+ ) => Promise<ModelInfo[]>;
11
  getApiKeyLink?: string;
12
  labelForGetApiKey?: string;
13
  icon?: string;
app/utils/constants.ts CHANGED
@@ -220,7 +220,6 @@ const PROVIDER_LIST: ProviderInfo[] = [
220
  ],
221
  getApiKeyLink: 'https://huggingface.co/settings/tokens',
222
  },
223
-
224
  {
225
  name: 'OpenAI',
226
  staticModels: [
@@ -325,26 +324,46 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(
325
 
326
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
327
 
328
- export async function getModelList(
329
- apiKeys: Record<string, string>,
330
- providerSettings?: Record<string, IProviderSetting>,
331
- ) {
 
 
 
 
332
  MODEL_LIST = [
333
  ...(
334
  await Promise.all(
335
  PROVIDER_LIST.filter(
336
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
337
- ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
338
  )
339
  ).flat(),
340
  ...staticModels,
341
  ];
 
342
  return MODEL_LIST;
343
  }
344
 
345
- async function getTogetherModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
 
 
 
 
346
  try {
347
- const baseUrl = settings?.baseUrl || import.meta.env.TOGETHER_API_BASE_URL || '';
 
 
 
 
 
 
 
 
 
 
 
348
  const provider = 'Together';
349
 
350
  if (!baseUrl) {
@@ -383,8 +402,19 @@ async function getTogetherModels(apiKeys?: Record<string, string>, settings?: IP
383
  }
384
  }
385
 
386
- const getOllamaBaseUrl = (settings?: IProviderSetting) => {
387
- const defaultBaseUrl = settings?.baseUrl || import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
 
 
 
 
 
 
 
 
 
 
 
388
 
389
  // Check if we're in the browser
390
  if (typeof window !== 'undefined') {
@@ -398,9 +428,13 @@ const getOllamaBaseUrl = (settings?: IProviderSetting) => {
398
  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
399
  };
400
 
401
- async function getOllamaModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
 
 
 
 
402
  try {
403
- const baseUrl = getOllamaBaseUrl(settings);
404
  const response = await fetch(`${baseUrl}/api/tags`);
405
  const data = (await response.json()) as OllamaApiResponse;
406
 
@@ -421,9 +455,21 @@ async function getOllamaModels(apiKeys?: Record<string, string>, settings?: IPro
421
  async function getOpenAILikeModels(
422
  apiKeys?: Record<string, string>,
423
  settings?: IProviderSetting,
 
424
  ): Promise<ModelInfo[]> {
425
  try {
426
- const baseUrl = settings?.baseUrl || import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
 
 
 
 
 
 
 
 
 
 
 
427
 
428
  if (!baseUrl) {
429
  return [];
@@ -486,9 +532,24 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
486
  }));
487
  }
488
 
489
- async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
 
 
 
 
490
  try {
491
- const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
 
 
 
 
 
 
 
 
 
 
 
492
  const response = await fetch(`${baseUrl}/v1/models`);
493
  const data = (await response.json()) as any;
494
 
@@ -503,29 +564,37 @@ async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: I
503
  }
504
  }
505
 
506
- async function initializeModelList(providerSettings?: Record<string, IProviderSetting>): Promise<ModelInfo[]> {
507
- let apiKeys: Record<string, string> = {};
 
 
 
 
 
508
 
509
- try {
510
- const storedApiKeys = Cookies.get('apiKeys');
 
511
 
512
- if (storedApiKeys) {
513
- const parsedKeys = JSON.parse(storedApiKeys);
514
 
515
- if (typeof parsedKeys === 'object' && parsedKeys !== null) {
516
- apiKeys = parsedKeys;
 
517
  }
 
 
 
518
  }
519
- } catch (error: any) {
520
- logStore.logError('Failed to fetch API keys from cookies', error);
521
- logger.warn(`Failed to fetch apikeys from cookies: ${error?.message}`);
522
  }
 
523
  MODEL_LIST = [
524
  ...(
525
  await Promise.all(
526
  PROVIDER_LIST.filter(
527
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
528
- ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
529
  )
530
  ).flat(),
531
  ...staticModels,
@@ -534,6 +603,7 @@ async function initializeModelList(providerSettings?: Record<string, IProviderSe
534
  return MODEL_LIST;
535
  }
536
 
 
537
  export {
538
  getOllamaModels,
539
  getOpenAILikeModels,
 
220
  ],
221
  getApiKeyLink: 'https://huggingface.co/settings/tokens',
222
  },
 
223
  {
224
  name: 'OpenAI',
225
  staticModels: [
 
324
 
325
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
326
 
327
+ export async function getModelList(options: {
328
+ apiKeys?: Record<string, string>;
329
+ providerSettings?: Record<string, IProviderSetting>;
330
+ serverEnv?: Record<string, string>;
331
+ }) {
332
+ const { apiKeys, providerSettings, serverEnv } = options;
333
+
334
+ // console.log({ providerSettings, serverEnv,env:process.env });
335
  MODEL_LIST = [
336
  ...(
337
  await Promise.all(
338
  PROVIDER_LIST.filter(
339
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
340
+ ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name], serverEnv)),
341
  )
342
  ).flat(),
343
  ...staticModels,
344
  ];
345
+
346
  return MODEL_LIST;
347
  }
348
 
349
+ async function getTogetherModels(
350
+ apiKeys?: Record<string, string>,
351
+ settings?: IProviderSetting,
352
+ serverEnv: Record<string, string> = {},
353
+ ): Promise<ModelInfo[]> {
354
  try {
355
+ let settingsBaseUrl = settings?.baseUrl;
356
+
357
+ if (settingsBaseUrl && settingsBaseUrl.length == 0) {
358
+ settingsBaseUrl = undefined;
359
+ }
360
+
361
+ const baseUrl =
362
+ settingsBaseUrl ||
363
+ serverEnv?.TOGETHER_API_BASE_URL ||
364
+ process.env.TOGETHER_API_BASE_URL ||
365
+ import.meta.env.TOGETHER_API_BASE_URL ||
366
+ '';
367
  const provider = 'Together';
368
 
369
  if (!baseUrl) {
 
402
  }
403
  }
404
 
405
+ const getOllamaBaseUrl = (settings?: IProviderSetting, serverEnv: Record<string, string> = {}) => {
406
+ let settingsBaseUrl = settings?.baseUrl;
407
+
408
+ if (settingsBaseUrl && settingsBaseUrl.length == 0) {
409
+ settingsBaseUrl = undefined;
410
+ }
411
+
412
+ const defaultBaseUrl =
413
+ settings?.baseUrl ||
414
+ serverEnv?.OLLAMA_API_BASE_URL ||
415
+ process.env.OLLAMA_API_BASE_URL ||
416
+ import.meta.env.OLLAMA_API_BASE_URL ||
417
+ 'http://localhost:11434';
418
 
419
  // Check if we're in the browser
420
  if (typeof window !== 'undefined') {
 
428
  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
429
  };
430
 
431
+ async function getOllamaModels(
432
+ apiKeys?: Record<string, string>,
433
+ settings?: IProviderSetting,
434
+ serverEnv: Record<string, string> = {},
435
+ ): Promise<ModelInfo[]> {
436
  try {
437
+ const baseUrl = getOllamaBaseUrl(settings, serverEnv);
438
  const response = await fetch(`${baseUrl}/api/tags`);
439
  const data = (await response.json()) as OllamaApiResponse;
440
 
 
455
  async function getOpenAILikeModels(
456
  apiKeys?: Record<string, string>,
457
  settings?: IProviderSetting,
458
+ serverEnv: Record<string, string> = {},
459
  ): Promise<ModelInfo[]> {
460
  try {
461
+ let settingsBaseUrl = settings?.baseUrl;
462
+
463
+ if (settingsBaseUrl && settingsBaseUrl.length == 0) {
464
+ settingsBaseUrl = undefined;
465
+ }
466
+
467
+ const baseUrl =
468
+ settingsBaseUrl ||
469
+ serverEnv.OPENAI_LIKE_API_BASE_URL ||
470
+ process.env.OPENAI_LIKE_API_BASE_URL ||
471
+ import.meta.env.OPENAI_LIKE_API_BASE_URL ||
472
+ '';
473
 
474
  if (!baseUrl) {
475
  return [];
 
532
  }));
533
  }
534
 
535
+ async function getLMStudioModels(
536
+ _apiKeys?: Record<string, string>,
537
+ settings?: IProviderSetting,
538
+ serverEnv: Record<string, string> = {},
539
+ ): Promise<ModelInfo[]> {
540
  try {
541
+ let settingsBaseUrl = settings?.baseUrl;
542
+
543
+ if (settingsBaseUrl && settingsBaseUrl.length == 0) {
544
+ settingsBaseUrl = undefined;
545
+ }
546
+
547
+ const baseUrl =
548
+ settingsBaseUrl ||
549
+ serverEnv.LMSTUDIO_API_BASE_URL ||
550
+ process.env.LMSTUDIO_API_BASE_URL ||
551
+ import.meta.env.LMSTUDIO_API_BASE_URL ||
552
+ 'http://localhost:1234';
553
  const response = await fetch(`${baseUrl}/v1/models`);
554
  const data = (await response.json()) as any;
555
 
 
564
  }
565
  }
566
 
567
+ async function initializeModelList(options: {
568
+ env?: Record<string, string>;
569
+ providerSettings?: Record<string, IProviderSetting>;
570
+ apiKeys?: Record<string, string>;
571
+ }): Promise<ModelInfo[]> {
572
+ const { providerSettings, apiKeys: providedApiKeys, env } = options;
573
+ let apiKeys: Record<string, string> = providedApiKeys || {};
574
 
575
+ if (!providedApiKeys) {
576
+ try {
577
+ const storedApiKeys = Cookies.get('apiKeys');
578
 
579
+ if (storedApiKeys) {
580
+ const parsedKeys = JSON.parse(storedApiKeys);
581
 
582
+ if (typeof parsedKeys === 'object' && parsedKeys !== null) {
583
+ apiKeys = parsedKeys;
584
+ }
585
  }
586
+ } catch (error: any) {
587
+ logStore.logError('Failed to fetch API keys from cookies', error);
588
+ logger.warn(`Failed to fetch apikeys from cookies: ${error?.message}`);
589
  }
 
 
 
590
  }
591
+
592
  MODEL_LIST = [
593
  ...(
594
  await Promise.all(
595
  PROVIDER_LIST.filter(
596
  (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
597
+ ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name], env)),
598
  )
599
  ).flat(),
600
  ...staticModels,
 
603
  return MODEL_LIST;
604
  }
605
 
606
+ // initializeModelList({})
607
  export {
608
  getOllamaModels,
609
  getOpenAILikeModels,