fernsdavid25
committed on
Update max_tokens in constants.ts
Browse files
max_tokens for Llama 3.1 models must be less than or equal to 8000, but it is set to 8192. Just change it to 8000 and the error is fixed.
app/lib/.server/llm/constants.ts
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
// see https://docs.anthropic.com/en/docs/about-claude/models
|
2 |
-
export const MAX_TOKENS =
|
3 |
|
4 |
// limits the number of model responses that can be returned in a single request
|
5 |
export const MAX_RESPONSE_SEGMENTS = 2;
|
|
|
1 |
// see https://docs.anthropic.com/en/docs/about-claude/models
|
2 |
+
export const MAX_TOKENS = 8000;
|
3 |
|
4 |
// limits the number of model responses that can be returned in a single request
|
5 |
export const MAX_RESPONSE_SEGMENTS = 2;
|