codacus committed on
Commit
7efad13
·
1 Parent(s): 5ead479
app/lib/.server/llm/stream-text.ts CHANGED
@@ -58,10 +58,15 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
58
  return { model, provider, content: cleanedContent };
59
  }
60
 
61
- export async function streamText(messages: Messages, env: Env, options?: StreamingOptions,apiKeys?: Record<string, string>) {
 
 
 
 
 
62
  let currentModel = DEFAULT_MODEL;
63
  let currentProvider = DEFAULT_PROVIDER.name;
64
- const MODEL_LIST = await getModelList(apiKeys||{});
65
  const processedMessages = messages.map((message) => {
66
  if (message.role === 'user') {
67
  const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -69,6 +74,7 @@ export async function streamText(messages: Messages, env: Env, options?: Streami
69
  if (MODEL_LIST.find((m) => m.name === model)) {
70
  currentModel = model;
71
  }
 
72
  currentProvider = provider;
73
 
74
  return { ...message, content };
 
58
  return { model, provider, content: cleanedContent };
59
  }
60
 
61
+ export async function streamText(
62
+ messages: Messages,
63
+ env: Env,
64
+ options?: StreamingOptions,
65
+ apiKeys?: Record<string, string>,
66
+ ) {
67
  let currentModel = DEFAULT_MODEL;
68
  let currentProvider = DEFAULT_PROVIDER.name;
69
+ const MODEL_LIST = await getModelList(apiKeys || {});
70
  const processedMessages = messages.map((message) => {
71
  if (message.role === 'user') {
72
  const { model, provider, content } = extractPropertiesFromMessage(message);
 
74
  if (MODEL_LIST.find((m) => m.name === model)) {
75
  currentModel = model;
76
  }
77
+
78
  currentProvider = provider;
79
 
80
  return { ...message, content };
app/routes/api.chat.ts CHANGED
@@ -8,8 +8,8 @@ export async function action(args: ActionFunctionArgs) {
8
  return chatAction(args);
9
  }
10
 
11
- function parseCookies(cookieHeader:string) {
12
- const cookies:any = {};
13
 
14
  // Split the cookie string by semicolons and spaces
15
  const items = cookieHeader.split(';').map((cookie) => cookie.trim());
@@ -29,7 +29,7 @@ function parseCookies(cookieHeader:string) {
29
  }
30
 
31
  async function chatAction({ context, request }: ActionFunctionArgs) {
32
- const { messages, model } = await request.json<{
33
  messages: Messages;
34
  model: string;
35
  }>();
@@ -37,7 +37,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
37
  const cookieHeader = request.headers.get('Cookie');
38
 
39
  // Parse the cookie's value (returns an object or null if no cookie exists)
40
- const apiKeys = JSON.parse(parseCookies(cookieHeader||"").apiKeys || '{}');
41
 
42
  const stream = new SwitchableStream();
43
 
@@ -60,7 +60,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
60
  messages.push({ role: 'assistant', content });
61
  messages.push({ role: 'user', content: CONTINUE_PROMPT });
62
 
63
- const result = await streamText(messages, context.cloudflare.env, options,apiKeys);
64
 
65
  return stream.switchSource(result.toAIStream());
66
  },
@@ -76,7 +76,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
76
  contentType: 'text/plain; charset=utf-8',
77
  },
78
  });
79
- } catch (error:any) {
80
  console.log(error);
81
 
82
  if (error.message?.includes('API key')) {
 
8
  return chatAction(args);
9
  }
10
 
11
+ function parseCookies(cookieHeader: string) {
12
+ const cookies: any = {};
13
 
14
  // Split the cookie string by semicolons and spaces
15
  const items = cookieHeader.split(';').map((cookie) => cookie.trim());
 
29
  }
30
 
31
  async function chatAction({ context, request }: ActionFunctionArgs) {
32
+ const { messages } = await request.json<{
33
  messages: Messages;
34
  model: string;
35
  }>();
 
37
  const cookieHeader = request.headers.get('Cookie');
38
 
39
  // Parse the cookie's value (returns an object or null if no cookie exists)
40
+ const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
41
 
42
  const stream = new SwitchableStream();
43
 
 
60
  messages.push({ role: 'assistant', content });
61
  messages.push({ role: 'user', content: CONTINUE_PROMPT });
62
 
63
+ const result = await streamText(messages, context.cloudflare.env, options, apiKeys);
64
 
65
  return stream.switchSource(result.toAIStream());
66
  },
 
76
  contentType: 'text/plain; charset=utf-8',
77
  },
78
  });
79
+ } catch (error: any) {
80
  console.log(error);
81
 
82
  if (error.message?.includes('API key')) {
app/utils/constants.ts CHANGED
@@ -263,7 +263,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
263
  },
264
  {
265
  name: 'Together',
266
- getDynamicModels: getTogetherModels,
267
  staticModels: [
268
  {
269
  name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
@@ -295,7 +295,6 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(
295
 
296
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
297
 
298
-
299
  export async function getModelList(apiKeys: Record<string, string>) {
300
  MODEL_LIST = [
301
  ...(
@@ -312,43 +311,44 @@ export async function getModelList(apiKeys: Record<string, string>) {
312
 
313
  async function getTogetherModels(apiKeys?: Record<string, string>): Promise<ModelInfo[]> {
314
  try {
315
- let baseUrl = import.meta.env.TOGETHER_API_BASE_URL || '';
316
- let provider='Together'
317
 
318
- if (!baseUrl) {
319
- return [];
320
- }
321
- let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? ''
322
 
323
- if (apiKeys && apiKeys[provider]) {
324
- apiKey = apiKeys[provider];
325
- }
 
 
326
 
327
- if (!apiKey) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
328
  return [];
329
  }
330
-
331
- const response = await fetch(`${baseUrl}/models`, {
332
- headers: {
333
- Authorization: `Bearer ${apiKey}`,
334
- },
335
- });
336
- const res = (await response.json()) as any;
337
- let data: any[] = (res || []).filter((model: any) => model.type=='chat')
338
- return data.map((m: any) => ({
339
- name: m.id,
340
- label: `${m.display_name} - in:$${(m.pricing.input).toFixed(
341
- 2,
342
- )} out:$${(m.pricing.output).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
343
- provider: provider,
344
- maxTokenAllowed: 8000,
345
- }));
346
- } catch (e) {
347
- console.error('Error getting OpenAILike models:', e);
348
- return [];
349
  }
350
- }
351
-
352
 
353
  const getOllamaBaseUrl = () => {
354
  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
@@ -396,11 +396,13 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
396
  if (!baseUrl) {
397
  return [];
398
  }
 
399
  let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
400
-
401
- let apikeys = JSON.parse(Cookies.get('apiKeys')||'{}')
402
- if (apikeys && apikeys['OpenAILike']){
403
- apiKey = apikeys['OpenAILike'];
 
404
  }
405
 
406
  const response = await fetch(`${baseUrl}/models`, {
@@ -458,6 +460,7 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
458
  if (typeof window === 'undefined') {
459
  return [];
460
  }
 
461
  try {
462
  const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
463
  const response = await fetch(`${baseUrl}/v1/models`);
@@ -476,6 +479,7 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
476
 
477
  async function initializeModelList(): Promise<ModelInfo[]> {
478
  let apiKeys: Record<string, string> = {};
 
479
  try {
480
  const storedApiKeys = Cookies.get('apiKeys');
481
 
@@ -486,9 +490,8 @@ async function initializeModelList(): Promise<ModelInfo[]> {
486
  apiKeys = parsedKeys;
487
  }
488
  }
489
-
490
- } catch (error) {
491
-
492
  }
493
  MODEL_LIST = [
494
  ...(
@@ -500,6 +503,7 @@ async function initializeModelList(): Promise<ModelInfo[]> {
500
  ).flat(),
501
  ...staticModels,
502
  ];
 
503
  return MODEL_LIST;
504
  }
505
 
 
263
  },
264
  {
265
  name: 'Together',
266
+ getDynamicModels: getTogetherModels,
267
  staticModels: [
268
  {
269
  name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
 
295
 
296
  export let MODEL_LIST: ModelInfo[] = [...staticModels];
297
 
 
298
  export async function getModelList(apiKeys: Record<string, string>) {
299
  MODEL_LIST = [
300
  ...(
 
311
 
312
  async function getTogetherModels(apiKeys?: Record<string, string>): Promise<ModelInfo[]> {
313
  try {
314
+ const baseUrl = import.meta.env.TOGETHER_API_BASE_URL || '';
315
+ const provider = 'Together';
316
 
317
+ if (!baseUrl) {
318
+ return [];
319
+ }
 
320
 
321
+ let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
322
+
323
+ if (apiKeys && apiKeys[provider]) {
324
+ apiKey = apiKeys[provider];
325
+ }
326
 
327
+ if (!apiKey) {
328
+ return [];
329
+ }
330
+
331
+ const response = await fetch(`${baseUrl}/models`, {
332
+ headers: {
333
+ Authorization: `Bearer ${apiKey}`,
334
+ },
335
+ });
336
+ const res = (await response.json()) as any;
337
+ const data: any[] = (res || []).filter((model: any) => model.type == 'chat');
338
+
339
+ return data.map((m: any) => ({
340
+ name: m.id,
341
+ label: `${m.display_name} - in:$${m.pricing.input.toFixed(
342
+ 2,
343
+ )} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
344
+ provider,
345
+ maxTokenAllowed: 8000,
346
+ }));
347
+ } catch (e) {
348
+ console.error('Error getting OpenAILike models:', e);
349
  return [];
350
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
351
  }
 
 
352
 
353
  const getOllamaBaseUrl = () => {
354
  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
 
396
  if (!baseUrl) {
397
  return [];
398
  }
399
+
400
  let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
401
+
402
+ const apikeys = JSON.parse(Cookies.get('apiKeys') || '{}');
403
+
404
+ if (apikeys && apikeys.OpenAILike) {
405
+ apiKey = apikeys.OpenAILike;
406
  }
407
 
408
  const response = await fetch(`${baseUrl}/models`, {
 
460
  if (typeof window === 'undefined') {
461
  return [];
462
  }
463
+
464
  try {
465
  const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
466
  const response = await fetch(`${baseUrl}/v1/models`);
 
479
 
480
  async function initializeModelList(): Promise<ModelInfo[]> {
481
  let apiKeys: Record<string, string> = {};
482
+
483
  try {
484
  const storedApiKeys = Cookies.get('apiKeys');
485
 
 
490
  apiKeys = parsedKeys;
491
  }
492
  }
493
+ } catch (error: any) {
494
+ console.warn(`Failed to fetch apikeys from cookies:${error?.message}`);
 
495
  }
496
  MODEL_LIST = [
497
  ...(
 
503
  ).flat(),
504
  ...staticModels,
505
  ];
506
+
507
  return MODEL_LIST;
508
  }
509