atrokhym committed
Commit 937ba7e · Parent: 302cd28

model pickup

app/lib/.server/llm/stream-text.ts CHANGED

@@ -64,6 +64,8 @@ export function streamText(
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER;
 
+  console.log('StreamText:', JSON.stringify(messages));
+
   const processedMessages = messages.map((message) => {
     if (message.role === 'user') {
       const { model, provider, content } = extractPropertiesFromMessage(message);
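For context, the new log lands just ahead of the loop that consumes the per-message model/provider metadata. A minimal sketch of that surrounding loop, assuming extractPropertiesFromMessage returns the parsed model and provider plus the message content with that metadata stripped (the exact logic is not shown in this diff):

const processedMessages = messages.map((message) => {
  if (message.role === 'user') {
    const { model, provider, content } = extractPropertiesFromMessage(message);

    // Remember the most recent model/provider the user asked for,
    // falling back to the DEFAULT_MODEL / DEFAULT_PROVIDER declared above.
    if (model) {
      currentModel = model;
    }
    if (provider) {
      currentProvider = provider;
    }

    // Forward the message with the model/provider markers removed from its content.
    return { ...message, content };
  }

  return message;
});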
app/routes/api.chat.ts CHANGED

@@ -31,11 +31,14 @@ function parseCookies(cookieHeader) {
 
 async function chatAction({ context, request }: ActionFunctionArgs) {
 
-  const { messages, imageData } = await request.json<{
+  const { messages, imageData, model } = await request.json<{
     messages: Messages,
-    imageData?: string[]
+    imageData?: string[],
+    model: string
   }>();
 
+  console.log('ChatAction:', JSON.stringify(messages));
+
   const cookieHeader = request.headers.get("Cookie");
 
   // Parse the cookie's value (returns an object or null if no cookie exists)
@@ -47,6 +50,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
   const options: StreamingOptions = {
     toolChoice: 'none',
     apiKeys,
+    model,
     onFinish: async ({ text: content, finishReason }) => {
       if (finishReason !== 'length') {
         return stream.close();
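With these changes the action expects a request body of the shape { messages, imageData?, model } and forwards model into StreamingOptions. A quick sketch of a matching client-side call (the /api/chat path follows Remix's flat-route naming for api.chat.ts; the payload values are illustrative assumptions, not taken from the diff):

const response = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages: [{ role: 'user', content: 'Hello' }],
    imageData: [],      // optional; the diff only declares the type string[]
    model: 'o1-mini',   // now forwarded into StreamingOptions as `model`
  }),
});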
app/utils/constants.ts CHANGED

@@ -30,13 +30,15 @@ const PROVIDER_LIST: ProviderInfo[] = [
     icon: "i-ph:cloud-arrow-down",
   }, {
     name: 'OpenAILike',
-    staticModels: [],
+    staticModels: [
+      { name: 'o1-mini', label: 'o1-mini', provider: 'OpenAILike' },
+    ],
     getDynamicModels: getOpenAILikeModels
   },
   {
     name: 'OpenRouter',
     staticModels: [
-      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
+      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenRouter' },
       {
         name: 'anthropic/claude-3.5-sonnet',
         label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
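For reference, the literals touched here imply roughly the following shapes for PROVIDER_LIST entries. This is a sketch inferred from the diff alone; field optionality and the getDynamicModels signature are assumptions, and the real declarations in constants.ts may carry additional fields:

interface ModelInfo {
  name: string;      // model id, e.g. 'o1-mini' or 'gpt-4o'
  label: string;     // label shown in the model picker
  provider: string;  // owning provider, e.g. 'OpenAILike' or 'OpenRouter'
}

interface ProviderInfo {
  name: string;
  icon?: string;                                  // e.g. "i-ph:cloud-arrow-down"
  staticModels: ModelInfo[];                      // hard-coded entries like the ones added above
  getDynamicModels?: () => Promise<ModelInfo[]>;  // e.g. getOpenAILikeModels (signature assumed)
}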