Fix per @wonderwhy-er's suggestion
app/lib/.server/llm/model.ts
CHANGED
@@ -86,6 +86,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 export function getLMStudioModel(baseURL: string, model: string) {
   const lmstudio = createOpenAI({
     baseUrl: `${baseURL}/v1`,
+    apiKey: "",
   });
 
   return lmstudio(model);
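For reference, a minimal usage sketch follows. LM Studio serves an OpenAI-compatible API locally (by default at http://localhost:1234) and needs no API key; the explicit empty apiKey appears intended to keep the createOpenAI provider from trying to resolve a key from the environment when none is configured. The import paths, the model name, and the generateText call from the ai package are illustrative assumptions, not part of this change.

import { generateText } from 'ai';
// Assumed Remix-style alias for app/lib/.server/llm/model.ts
import { getLMStudioModel } from '~/lib/.server/llm/model';

// Point the provider at a local LM Studio server (default port 1234).
// The model name is a placeholder for whatever model is loaded in LM Studio.
const model = getLMStudioModel('http://localhost:1234', 'qwen2.5-coder-7b-instruct');

const { text } = await generateText({
  model,
  prompt: 'Say hello from LM Studio.',
});

console.log(text);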