fix: temporarily removed context optimization; to be made optional via the menu
Browse files
- app/commit.json +1 -1
- app/lib/.server/llm/stream-text.ts +4 -3
- app/utils/constants.ts +0 -1
app/commit.json
CHANGED
@@ -1 +1 @@
|
|
1 |
-
{ "commit": "
|
|
|
1 |
+
{ "commit": "8c4397a19f3eab2382082a39526d66385e9d2a49" }
|
app/lib/.server/llm/stream-text.ts
CHANGED
@@ -38,7 +38,7 @@ type Dirent = File | Folder;
|
|
38 |
|
39 |
export type FileMap = Record<string, Dirent | undefined>;
|
40 |
|
41 |
-
function simplifyBoltActions(input: string): string {
|
42 |
// Using regex to match boltAction tags that have type="file"
|
43 |
const regex = /(<boltAction[^>]*type="file"[^>]*>)([\s\S]*?)(<\/boltAction>)/g;
|
44 |
|
@@ -156,8 +156,9 @@ export async function streamText(props: {
|
|
156 |
|
157 |
return { ...message, content };
|
158 |
} else if (message.role == 'assistant') {
|
159 |
-
|
160 |
-
|
|
|
161 |
|
162 |
return { ...message, content };
|
163 |
}
|
|
|
38 |
|
39 |
export type FileMap = Record<string, Dirent | undefined>;
|
40 |
|
41 |
+
export function simplifyBoltActions(input: string): string {
|
42 |
// Using regex to match boltAction tags that have type="file"
|
43 |
const regex = /(<boltAction[^>]*type="file"[^>]*>)([\s\S]*?)(<\/boltAction>)/g;
|
44 |
|
|
|
156 |
|
157 |
return { ...message, content };
|
158 |
} else if (message.role == 'assistant') {
|
159 |
+
const content = message.content;
|
160 |
+
|
161 |
+
// content = simplifyBoltActions(content);
|
162 |
|
163 |
return { ...message, content };
|
164 |
}
|
app/utils/constants.ts
CHANGED
@@ -462,7 +462,6 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
|
|
462 |
}
|
463 |
|
464 |
async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
|
465 |
-
|
466 |
try {
|
467 |
const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
|
468 |
const response = await fetch(`${baseUrl}/v1/models`);
|
|
|
462 |
}
|
463 |
|
464 |
async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
|
|
|
465 |
try {
|
466 |
const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
|
467 |
const response = await fetch(`${baseUrl}/v1/models`);
|