chore: clean up logging
Browse files
app/lib/.server/llm/stream-text.ts
CHANGED
@@ -5,7 +5,6 @@ import { getModel } from '~/lib/.server/llm/model';
|
|
5 |
import { MAX_TOKENS } from './constants';
|
6 |
import { getSystemPrompt } from './prompts';
|
7 |
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
|
8 |
-
import { logger } from '~/utils/logger';
|
9 |
|
10 |
interface ToolResult<Name extends string, Args, Result> {
|
11 |
toolCallId: string;
|
@@ -41,7 +40,6 @@ function extractModelFromMessage(message: Message): { model: string; content: st
|
|
41 |
|
42 |
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
|
43 |
let currentModel = DEFAULT_MODEL;
|
44 |
-
logger.debug('model List', JSON.stringify(MODEL_LIST, null, 2))
|
45 |
const processedMessages = messages.map((message) => {
|
46 |
if (message.role === 'user') {
|
47 |
const { model, content } = extractModelFromMessage(message);
|
|
|
5 |
import { MAX_TOKENS } from './constants';
|
6 |
import { getSystemPrompt } from './prompts';
|
7 |
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
|
|
|
8 |
|
9 |
interface ToolResult<Name extends string, Args, Result> {
|
10 |
toolCallId: string;
|
|
|
40 |
|
41 |
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
|
42 |
let currentModel = DEFAULT_MODEL;
|
|
|
43 |
const processedMessages = messages.map((message) => {
|
44 |
if (message.role === 'user') {
|
45 |
const { model, content } = extractModelFromMessage(message);
|