Karrot committed on
Commit
4edcc5e
·
1 Parent(s): 8e7220e

LM Studio Integration

Browse files
.env.example CHANGED
@@ -40,5 +40,10 @@ OPENAI_LIKE_API_KEY=
40
  # You only need this environment variable set if you want to use Mistral models
41
  MISTRAL_API_KEY=
42
 
 
 
 
 
 
43
  # Include this environment variable if you want more logging for debugging locally
44
  VITE_LOG_LEVEL=debug
 
40
  # You only need this environment variable set if you want to use Mistral models
41
  MISTRAL_API_KEY=
42
 
43
+ # Get LMStudio Base URL from LM Studio Developer Console
44
+ # Make sure to enable CORS
45
+ # Example: http://localhost:1234
46
+ LMSTUDIO_API_BASE_URL=
47
+
48
  # Include this environment variable if you want more logging for debugging locally
49
  VITE_LOG_LEVEL=debug
app/components/chat/BaseChat.tsx CHANGED
@@ -48,6 +48,9 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
48
  <option key="OpenAILike" value="OpenAILike">
49
  OpenAILike
50
  </option>
 
 
 
51
  </select>
52
  <select
53
  value={model}
 
48
  <option key="OpenAILike" value="OpenAILike">
49
  OpenAILike
50
  </option>
51
+ <option key="LMStudio" value="LMStudio">
52
+ LMStudio
53
+ </option>
54
  </select>
55
  <select
56
  value={model}
app/lib/.server/llm/api-key.ts CHANGED
@@ -34,6 +34,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
34
  switch (provider) {
35
  case 'OpenAILike':
36
  return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
 
 
37
  case 'Ollama':
38
  return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
39
  default:
 
34
  switch (provider) {
35
  case 'OpenAILike':
36
  return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
37
+ case 'LMStudio':
38
+ return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
39
  case 'Ollama':
40
  return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
41
  default:
app/lib/.server/llm/model.ts CHANGED
@@ -80,6 +80,14 @@ export function getOpenRouterModel(apiKey: string, model: string) {
80
  return openRouter.chat(model);
81
  }
82
 
 
 
 
 
 
 
 
 
83
  export function getModel(provider: string, model: string, env: Env) {
84
  const apiKey = getAPIKey(env, provider);
85
  const baseURL = getBaseURL(env, provider);
@@ -94,13 +102,15 @@ export function getModel(provider: string, model: string, env: Env) {
94
  case 'OpenRouter':
95
  return getOpenRouterModel(apiKey, model);
96
  case 'Google':
97
- return getGoogleModel(apiKey, model)
98
  case 'OpenAILike':
99
  return getOpenAILikeModel(baseURL,apiKey, model);
100
  case 'Deepseek':
101
- return getDeepseekModel(apiKey, model)
102
  case 'Mistral':
103
  return getMistralModel(apiKey, model);
 
 
104
  default:
105
  return getOllamaModel(baseURL, model);
106
  }
 
80
  return openRouter.chat(model);
81
  }
82
 
83
+ export function getLMStudioModel(baseURL: string, model: string) {
84
+ const lmstudio = createOpenAI({
85
+ baseUrl: 'http://localhost:1234/v1',
86
+ });
87
+
88
+ return lmstudio(model);
89
+ }
90
+
91
  export function getModel(provider: string, model: string, env: Env) {
92
  const apiKey = getAPIKey(env, provider);
93
  const baseURL = getBaseURL(env, provider);
 
102
  case 'OpenRouter':
103
  return getOpenRouterModel(apiKey, model);
104
  case 'Google':
105
+ return getGoogleModel(apiKey, model);
106
  case 'OpenAILike':
107
  return getOpenAILikeModel(baseURL,apiKey, model);
108
  case 'Deepseek':
109
+ return getDeepseekModel(apiKey, model);
110
  case 'Mistral':
111
  return getMistralModel(apiKey, model);
112
+ case 'LMStudio':
113
+ return getLMStudioModel(baseURL, model);
114
  default:
115
  return getOllamaModel(baseURL, model);
116
  }
app/utils/constants.ts CHANGED
@@ -86,10 +86,28 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
86
  }
87
 
88
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
89
  async function initializeModelList(): Promise<void> {
90
  const ollamaModels = await getOllamaModels();
91
  const openAiLikeModels = await getOpenAILikeModels();
92
- MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
 
93
  }
94
  initializeModelList().then();
95
- export { getOllamaModels, getOpenAILikeModels, initializeModelList };
 
86
  }
87
 
88
  }
89
+
90
+ async function getLMStudioModels(): Promise<ModelInfo[]> {
91
+ try {
92
+ const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
93
+ const response = await fetch(`${base_url}/v1/models`);
94
+ const data = await response.json() as any;
95
+ return data.data.map((model: any) => ({
96
+ name: model.id,
97
+ label: model.id,
98
+ provider: 'LMStudio',
99
+ }));
100
+ } catch (e) {
101
+ return [];
102
+ }
103
+ }
104
+
105
+
106
  async function initializeModelList(): Promise<void> {
107
  const ollamaModels = await getOllamaModels();
108
  const openAiLikeModels = await getOpenAILikeModels();
109
+ const lmstudioModels = await getLMStudioModels();
110
+ MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels,...lmstudioModels,];
111
  }
112
  initializeModelList().then();
113
+ export { getOllamaModels,getOpenAILikeModels,getLMStudioModels,initializeModelList };
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
vite.config.ts CHANGED
@@ -27,7 +27,7 @@ export default defineConfig((config) => {
27
  chrome129IssuePlugin(),
28
  config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
29
  ],
30
- envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
31
  css: {
32
  preprocessorOptions: {
33
  scss: {
 
27
  chrome129IssuePlugin(),
28
  config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
29
  ],
30
+ envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"],
31
  css: {
32
  preprocessorOptions: {
33
  scss: {
worker-configuration.d.ts CHANGED
@@ -7,4 +7,5 @@ interface Env {
7
  OPENAI_LIKE_API_KEY: string;
8
  OPENAI_LIKE_API_BASE_URL: string;
9
  DEEPSEEK_API_KEY: string;
 
10
  }
 
7
  OPENAI_LIKE_API_KEY: string;
8
  OPENAI_LIKE_API_BASE_URL: string;
9
  DEEPSEEK_API_KEY: string;
10
+ LMSTUDIO_API_BASE_URL: string;
11
  }