atrokhym committed
Commit 0741610 · 1 Parent(s): 70d14df

merge with upstream

app/lib/.server/llm/model.ts CHANGED
@@ -22,7 +22,6 @@ export function getAnthropicModel(apiKey: string, model: string) {
 }
 
 export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
-  // console.log('OpenAILike config:', { baseURL, hasApiKey: !!apiKey, model });
   const openai = createOpenAI({
     baseURL,
     apiKey,
@@ -132,8 +131,6 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
   apiKey = getAPIKey(env, provider, apiKeys); // Then assign
   baseURL = getBaseURL(env, provider);
 
-  // console.log('getModel inputs:', { provider, model, baseURL, hasApiKey: !!apiKey });
-
   switch (provider) {
     case 'Anthropic':
       return getAnthropicModel(apiKey, model);
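Note: only debug console.log lines are removed in this file; the provider factory itself is unchanged. A minimal sketch of the post-change helper, assuming createOpenAI is imported from '@ai-sdk/openai' as the surrounding file appears to do (import and return value are not shown in this hunk):

// Sketch only: the removed line was a console.log; the rest follows the diff context.
import { createOpenAI } from '@ai-sdk/openai'; // assumed import, not visible in this hunk

export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
  const openai = createOpenAI({
    baseURL,
    apiKey,
  });

  return openai(model); // assumed, by analogy with the other get*Model helpers
}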
app/lib/.server/llm/stream-text.ts CHANGED
@@ -52,12 +52,9 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
       })
     : textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
 
-  // console.log('Model from message:', model);
-  // console.log('Found in MODEL_LIST:', MODEL_LIST.find((m) => m.name === model));
-  // console.log('Current MODEL_LIST:', MODEL_LIST);
-
   return { model, provider, content: cleanedContent };
 }
+
 export function streamText(
   messages: Messages,
   env: Env,
@@ -79,20 +76,21 @@ export function streamText(
 
       return { ...message, content };
     }
-
-  const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
-
-  const dynamicMaxTokens =
-    modelDetails && modelDetails.maxTokenAllowed
-      ? modelDetails.maxTokenAllowed
-      : MAX_TOKENS;
-
-  return _streamText({
-    model: getModel(currentProvider, currentModel, env, apiKeys),
-    system: getSystemPrompt(),
-    maxTokens: dynamicMaxTokens,
-    messages: convertToCoreMessages(processedMessages),
-    ...options,
-  });
-  }
-  )}
+    return message;
+  });
+
+  const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
+
+  const dynamicMaxTokens =
+    modelDetails && modelDetails.maxTokenAllowed
+      ? modelDetails.maxTokenAllowed
+      : MAX_TOKENS;
+
+  return _streamText({
+    ...options,
+    model: getModel(currentProvider, currentModel, env, apiKeys),
+    system: getSystemPrompt(),
+    maxTokens: dynamicMaxTokens,
+    messages: convertToCoreMessages(processedMessages),
+  });
+}
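Note: this is the substantive part of the merge. The message-mapping callback now returns non-user messages unchanged, the options spread moves to the top of the _streamText call so the explicit fields below it take precedence, and maxTokens is resolved per model. A sketch of the resulting wrapper, with the parts not visible in the diff (the defaults and the user-message branch) reconstructed as assumptions rather than taken from this commit:

// Illustrative reconstruction; DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_LIST, MAX_TOKENS,
// getModel, getSystemPrompt and convertToCoreMessages come from the surrounding code,
// and the user-message branch is assumed, not shown in this hunk.
export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
  let currentModel = DEFAULT_MODEL;
  let currentProvider = DEFAULT_PROVIDER;

  const processedMessages = messages.map((message) => {
    if (message.role === 'user') {
      const { model, provider, content } = extractPropertiesFromMessage(message);

      if (MODEL_LIST.find((m) => m.name === model)) {
        currentModel = model;
      }

      currentProvider = provider;

      return { ...message, content };
    }

    return message; // non-user messages now pass through untouched
  });

  const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
  const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;

  return _streamText({
    ...options, // spread first so the explicit fields below win over caller-supplied values
    model: getModel(currentProvider, currentModel, env, apiKeys),
    system: getSystemPrompt(),
    maxTokens: dynamicMaxTokens,
    messages: convertToCoreMessages(processedMessages),
  });
}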
app/routes/api.chat.ts CHANGED
@@ -37,8 +37,6 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
     model: string
   }>();
 
-  // console.log('ChatAction:', JSON.stringify(messages));
-
   const cookieHeader = request.headers.get("Cookie");
 
   // Parse the cookie's value (returns an object or null if no cookie exists)
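Note: only a debug console.log is dropped here. The comment in the trailing context refers to turning the Cookie header into a provider-to-key map; a hypothetical version of that step (the parseCookies helper and the apiKeys cookie name are assumptions, not shown in this hunk) looks roughly like:

// Hypothetical sketch of the cookie-parsing step referenced by the comment above.
const cookieHeader = request.headers.get('Cookie');
const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}'); // assumed helper and cookie key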
app/utils/constants.ts CHANGED
@@ -32,7 +32,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
     name: 'OpenAILike',
     staticModels: [
       { name: 'o1-mini', label: 'o1-mini', provider: 'OpenAILike' },
-      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
+      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAILike' },
     ],
     getDynamicModels: getOpenAILikeModels
   },
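Note: the fix tags the static gpt-4o-mini entry with provider 'OpenAILike' instead of 'OpenAI', matching the provider block it sits in. The field matters because model entries are typically selected by comparing their provider field to the provider's name, as in this illustrative filter (not part of the commit):

// Illustrative only: an entry tagged 'OpenAI' would never surface under the OpenAILike provider.
const openAILikeModels = MODEL_LIST.filter((m) => m.provider === 'OpenAILike');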