TommyAI committed on
Commit
074e2f3
·
1 Parent(s): a6d81b1

Moved the provider and setProvider variables to the higher-level component so that they can be accessed in sendMessage.

Browse files
app/components/chat/BaseChat.tsx CHANGED
@@ -24,8 +24,7 @@ const EXAMPLE_PROMPTS = [
24
 
25
  const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
26
 
27
- const ModelSelector = ({ model, setModel, modelList, providerList }) => {
28
- const [provider, setProvider] = useState(DEFAULT_PROVIDER);
29
  return (
30
  <div className="mb-2">
31
  <select
@@ -79,6 +78,8 @@ interface BaseChatProps {
79
  input?: string;
80
  model: string;
81
  setModel: (model: string) => void;
 
 
82
  handleStop?: () => void;
83
  sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
84
  handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
@@ -100,6 +101,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
100
  input = '',
101
  model,
102
  setModel,
 
 
103
  sendMessage,
104
  handleInputChange,
105
  enhancePrompt,
@@ -157,6 +160,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
157
  model={model}
158
  setModel={setModel}
159
  modelList={MODEL_LIST}
 
 
160
  providerList={providerList}
161
  />
162
  <div
 
24
 
25
  const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
26
 
27
+ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
 
28
  return (
29
  <div className="mb-2">
30
  <select
 
78
  input?: string;
79
  model: string;
80
  setModel: (model: string) => void;
81
+ provider: string;
82
+ setProvider: (provider: string) => void;
83
  handleStop?: () => void;
84
  sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
85
  handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
 
101
  input = '',
102
  model,
103
  setModel,
104
+ provider,
105
+ setProvider,
106
  sendMessage,
107
  handleInputChange,
108
  enhancePrompt,
 
160
  model={model}
161
  setModel={setModel}
162
  modelList={MODEL_LIST}
163
+ provider={provider}
164
+ setProvider={setProvider}
165
  providerList={providerList}
166
  />
167
  <div
app/components/chat/Chat.client.tsx CHANGED
@@ -11,7 +11,7 @@ import { useChatHistory } from '~/lib/persistence';
11
  import { chatStore } from '~/lib/stores/chat';
12
  import { workbenchStore } from '~/lib/stores/workbench';
13
  import { fileModificationsToHTML } from '~/utils/diff';
14
- import { DEFAULT_MODEL } from '~/utils/constants';
15
  import { cubicEasingFn } from '~/utils/easings';
16
  import { createScopedLogger, renderLogger } from '~/utils/logger';
17
  import { BaseChat } from './BaseChat';
@@ -74,6 +74,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
74
 
75
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
76
  const [model, setModel] = useState(DEFAULT_MODEL);
 
77
 
78
  const { showChat } = useStore(chatStore);
79
 
@@ -182,7 +183,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
182
  * manually reset the input and we'd have to manually pass in file attachments. However, those
183
  * aren't relevant here.
184
  */
185
- append({ role: 'user', content: `[Model: ${model}]\n\n${diff}\n\n${_input}` });
186
 
187
  /**
188
  * After sending a new message we reset all modifications since the model
@@ -190,7 +191,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
190
  */
191
  workbenchStore.resetAllFileModifications();
192
  } else {
193
- append({ role: 'user', content: `[Model: ${model}]\n\n${_input}` });
194
  }
195
 
196
  setInput('');
@@ -215,6 +216,8 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
215
  sendMessage={sendMessage}
216
  model={model}
217
  setModel={setModel}
 
 
218
  messageRef={messageRef}
219
  scrollRef={scrollRef}
220
  handleInputChange={handleInputChange}
 
11
  import { chatStore } from '~/lib/stores/chat';
12
  import { workbenchStore } from '~/lib/stores/workbench';
13
  import { fileModificationsToHTML } from '~/utils/diff';
14
+ import { DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
15
  import { cubicEasingFn } from '~/utils/easings';
16
  import { createScopedLogger, renderLogger } from '~/utils/logger';
17
  import { BaseChat } from './BaseChat';
 
74
 
75
  const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
76
  const [model, setModel] = useState(DEFAULT_MODEL);
77
+ const [provider, setProvider] = useState(DEFAULT_PROVIDER);
78
 
79
  const { showChat } = useStore(chatStore);
80
 
 
183
  * manually reset the input and we'd have to manually pass in file attachments. However, those
184
  * aren't relevant here.
185
  */
186
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${diff}\n\n${_input}` });
187
 
188
  /**
189
  * After sending a new message we reset all modifications since the model
 
191
  */
192
  workbenchStore.resetAllFileModifications();
193
  } else {
194
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${_input}` });
195
  }
196
 
197
  setInput('');
 
216
  sendMessage={sendMessage}
217
  model={model}
218
  setModel={setModel}
219
+ provider={provider}
220
+ setProvider={setProvider}
221
  messageRef={messageRef}
222
  scrollRef={scrollRef}
223
  handleInputChange={handleInputChange}
app/lib/.server/llm/stream-text.ts CHANGED
@@ -24,42 +24,51 @@ export type Messages = Message[];
24
 
25
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
26
 
27
- function extractModelFromMessage(message: Message): { model: string; content: string } {
28
  const modelRegex = /^\[Model: (.*?)\]\n\n/;
29
- const match = message.content.match(modelRegex);
30
 
31
- if (match) {
32
- const model = match[1];
33
- const content = message.content.replace(modelRegex, '');
34
- return { model, content };
35
- }
36
 
37
- // Default model if not specified
38
- return { model: DEFAULT_MODEL, content: message.content };
 
 
 
 
 
 
 
 
 
39
  }
40
 
41
  export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
42
  let currentModel = DEFAULT_MODEL;
 
 
43
  const processedMessages = messages.map((message) => {
44
  if (message.role === 'user') {
45
- const { model, content } = extractModelFromMessage(message);
46
- if (model && MODEL_LIST.find((m) => m.name === model)) {
47
- currentModel = model; // Update the current model
 
48
  }
 
 
 
49
  return { ...message, content };
50
  }
51
- return message;
52
- });
53
 
54
- const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;
 
55
 
56
  return _streamText({
57
- model: getModel(provider, currentModel, env),
58
  system: getSystemPrompt(),
59
  maxTokens: MAX_TOKENS,
60
- // headers: {
61
- // 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
62
- // },
63
  messages: convertToCoreMessages(processedMessages),
64
  ...options,
65
  });
 
24
 
25
  export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
26
 
27
+ function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
28
  const modelRegex = /^\[Model: (.*?)\]\n\n/;
29
+ const providerRegex = /\[Provider: (.*?)\]\n\n/;
30
 
31
+ // Extract model
32
+ const modelMatch = message.content.match(modelRegex);
33
+ const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
 
 
34
 
35
+ // Extract provider
36
+ const providerMatch = message.content.match(providerRegex);
37
+ const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
38
+
39
+ // Remove model and provider lines from content
40
+ const cleanedContent = message.content
41
+ .replace(modelRegex, '')
42
+ .replace(providerRegex, '')
43
+ .trim();
44
+
45
+ return { model, provider, content: cleanedContent };
46
  }
47
 
48
  export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
49
  let currentModel = DEFAULT_MODEL;
50
+ let currentProvider = DEFAULT_PROVIDER;
51
+
52
  const processedMessages = messages.map((message) => {
53
  if (message.role === 'user') {
54
+ const { model, provider, content } = extractPropertiesFromMessage(message);
55
+
56
+ if (MODEL_LIST.find((m) => m.name === model)) {
57
+ currentModel = model;
58
  }
59
+
60
+ currentProvider = provider;
61
+
62
  return { ...message, content };
63
  }
 
 
64
 
65
+ return message; // No changes for non-user messages
66
+ });
67
 
68
  return _streamText({
69
+ model: getModel(currentProvider, currentModel, env),
70
  system: getSystemPrompt(),
71
  maxTokens: MAX_TOKENS,
 
 
 
72
  messages: convertToCoreMessages(processedMessages),
73
  ...options,
74
  });