Eduards committed
Commit 4b492b9 · unverified · 2 Parent(s): c968948 32ae66a

Merge pull request #104 from karrot0/main

.env.example CHANGED
@@ -43,6 +43,12 @@ OPENAI_LIKE_API_KEY=
 # You only need this environment variable set if you want to use Mistral models
 MISTRAL_API_KEY=
 
+
+# Get LMStudio Base URL from LM Studio Developer Console
+# Make sure to enable CORS
+# Example: http://localhost:1234
+LMSTUDIO_API_BASE_URL=
+
 # Get your xAI API key
 # https://x.ai/api
 # You only need this environment variable set if you want to use xAI models
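
LM Studio serves an OpenAI-compatible API from its Developer Console, and the new LMSTUDIO_API_BASE_URL variable simply points at that server. As a minimal sketch (not part of the PR), the URL can be confirmed before filling in the variable, assuming the default port 1234:

// Sketch: confirm the LM Studio server answers at the configured base URL.
// LMSTUDIO_API_BASE_URL and the default port 1234 follow the example above.
const baseURL = process.env.LMSTUDIO_API_BASE_URL ?? 'http://localhost:1234';

async function checkLMStudio(): Promise<void> {
  const response = await fetch(`${baseURL}/v1/models`);
  if (!response.ok) {
    throw new Error(`LM Studio not reachable at ${baseURL} (status ${response.status})`);
  }
  const { data } = (await response.json()) as { data: { id: string }[] };
  console.log('Models currently loaded:', data.map((m) => m.id).join(', '));
}

checkLMStudio().catch(console.error);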
app/components/chat/BaseChat.tsx CHANGED
@@ -49,6 +49,9 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
         <option key="OpenAILike" value="OpenAILike">
           OpenAILike
         </option>
+        <option key="LMStudio" value="LMStudio">
+          LMStudio
+        </option>
       </select>
       <select
         value={model}
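
For context, ModelSelector receives both provider and modelList as props, and the second select is expected to show only models whose provider matches the chosen option. A rough sketch of that relationship, with 'LMStudio' now a valid provider value (the filtering helper here is an assumption for illustration, not the component's actual code):

// Hypothetical helper mirroring how the model dropdown is assumed to be narrowed
// by the selected provider; ModelInfo matches the shape built in app/utils/constants.ts.
interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}

function modelsForProvider(modelList: ModelInfo[], provider: string): ModelInfo[] {
  // provider can now be 'LMStudio' in addition to 'OpenAILike', 'Ollama', etc.
  return modelList.filter((m) => m.provider === provider);
}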
app/lib/.server/llm/api-key.ts CHANGED
@@ -42,6 +42,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
  switch (provider) {
    case 'OpenAILike':
      return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
+    case 'LMStudio':
+      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
    case 'Ollama':
      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
      if (env.RUNNING_IN_DOCKER === 'true') {
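
Unlike the Ollama branch just below it, the new LMStudio case returns the configured URL as-is and does not rewrite localhost when RUNNING_IN_DOCKER is set. A standalone sketch of the resolution logic, with an optional Docker rewrite shown purely as an assumption (it is not part of this merge):

// Sketch only: resolve the LM Studio base URL the way the new switch branch does,
// plus a hypothetical Docker rewrite modeled on the Ollama branch.
function resolveLMStudioBaseURL(env: Record<string, string | undefined>): string {
  const baseUrl = env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
  if (env.RUNNING_IN_DOCKER === 'true') {
    // Assumption: inside Docker, localhost would need to point at the host machine.
    return baseUrl.replace('localhost', 'host.docker.internal');
  }
  return baseUrl;
}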
app/lib/.server/llm/model.ts CHANGED
@@ -83,6 +83,15 @@ export function getOpenRouterModel(apiKey: string, model: string) {
   return openRouter.chat(model);
 }
 
+export function getLMStudioModel(baseURL: string, model: string) {
+  const lmstudio = createOpenAI({
+    baseUrl: `${baseURL}/v1`,
+    apiKey: "",
+  });
+
+  return lmstudio(model);
+}
+
 export function getXAIModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.x.ai/v1',
@@ -105,13 +114,15 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
    case 'OpenRouter':
      return getOpenRouterModel(apiKey, model);
    case 'Google':
-      return getGoogleModel(apiKey, model)
+      return getGoogleModel(apiKey, model);
    case 'OpenAILike':
      return getOpenAILikeModel(baseURL,apiKey, model);
    case 'Deepseek':
-      return getDeepseekModel(apiKey, model)
+      return getDeepseekModel(apiKey, model);
    case 'Mistral':
      return getMistralModel(apiKey, model);
+    case 'LMStudio':
+      return getLMStudioModel(baseURL, model);
    case 'xAI':
      return getXAIModel(apiKey, model);
    default:
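
getLMStudioModel wraps the LM Studio server with the same createOpenAI helper used for xAI, passing an empty API key because LM Studio does not check it. A minimal end-to-end sketch of calling such a model through the project's ai / @ai-sdk/openai packages (the model id and prompt are placeholders; current @ai-sdk/openai releases spell the setting baseURL):

// Sketch: talk to an LM Studio model via the OpenAI-compatible provider.
// 'qwen2.5-coder-7b-instruct' is a placeholder id — use whatever /v1/models reports.
import { createOpenAI } from '@ai-sdk/openai';
import { generateText } from 'ai';

const lmstudio = createOpenAI({
  baseURL: `${process.env.LMSTUDIO_API_BASE_URL ?? 'http://localhost:1234'}/v1`,
  apiKey: 'not-needed', // LM Studio accepts any key, so a placeholder is fine
});

const { text } = await generateText({
  model: lmstudio('qwen2.5-coder-7b-instruct'),
  prompt: 'Say hello from LM Studio.',
});

console.log(text);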
app/utils/constants.ts CHANGED
@@ -107,10 +107,28 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
   }
 
 }
+
+async function getLMStudioModels(): Promise<ModelInfo[]> {
+  try {
+    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
+    const response = await fetch(`${base_url}/v1/models`);
+    const data = await response.json() as any;
+    return data.data.map((model: any) => ({
+      name: model.id,
+      label: model.id,
+      provider: 'LMStudio',
+    }));
+  } catch (e) {
+    return [];
+  }
+}
+
+
 async function initializeModelList(): Promise<void> {
   const ollamaModels = await getOllamaModels();
   const openAiLikeModels = await getOpenAILikeModels();
-  MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
+  const lmstudioModels = await getLMStudioModels();
+  MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels,...lmstudioModels,];
 }
 initializeModelList().then();
-export { getOllamaModels, getOpenAILikeModels, initializeModelList };
+export { getOllamaModels,getOpenAILikeModels,getLMStudioModels,initializeModelList };
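
getLMStudioModels relies on the OpenAI-style list endpoint, which returns an object of the form { object: 'list', data: [{ id, ... }] }. A typed sketch of that response and of the mapping the new function performs (the interface names are illustrative, not the repo's own; ModelInfo mirrors the fields used in the diff):

// Sketch: the /v1/models response shape LM Studio returns, and the mapping
// getLMStudioModels applies to it.
interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}

interface OpenAIModelsResponse {
  object: 'list';
  data: { id: string; object: 'model'; owned_by?: string }[];
}

function toModelInfo(response: OpenAIModelsResponse): ModelInfo[] {
  return response.data.map((model) => ({
    name: model.id,
    label: model.id,
    provider: 'LMStudio',
  }));
}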
vite.config.ts CHANGED
@@ -27,7 +27,7 @@ export default defineConfig((config) => {
     chrome129IssuePlugin(),
     config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
   ],
-  envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
+  envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"],
   css: {
     preprocessorOptions: {
       scss: {
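
The envPrefix addition matters because Vite only exposes environment variables whose names match one of the listed prefixes on import.meta.env; without it, the lookup in getLMStudioModels would always be undefined and the code would silently fall back to the default. Roughly:

// With "LMSTUDIO_API_BASE_URL" in envPrefix, this resolves to the configured value;
// without it, import.meta.env.LMSTUDIO_API_BASE_URL is undefined and the fallback wins.
const baseURL: string = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';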
worker-configuration.d.ts CHANGED
@@ -7,4 +7,5 @@ interface Env {
   OPENAI_LIKE_API_KEY: string;
   OPENAI_LIKE_API_BASE_URL: string;
   DEEPSEEK_API_KEY: string;
+  LMSTUDIO_API_BASE_URL: string;
 }