Merge pull request #351 from hasanraiyan/main
Files changed:

- .env.example (+4 -0)
- README.md (+2 -1)
- app/lib/.server/llm/api-key.ts (+2 -0)
- app/lib/.server/llm/model.ts (+14 -0)
- app/lib/.server/llm/stream-text.ts (+13 -5)
- app/utils/constants.ts (+69 -50)
- app/utils/types.ts (+1 -0)
- package.json (+1 -0)
- pnpm-lock.yaml (+47 -0)
.env.example
CHANGED
@@ -49,6 +49,10 @@ OPENAI_LIKE_API_KEY=
 # You only need this environment variable set if you want to use Mistral models
 MISTRAL_API_KEY=
 
+# Get the Cohere Api key by following these instructions -
+# https://dashboard.cohere.com/api-keys
+# You only need this environment variable set if you want to use Cohere models
+COHERE_API_KEY=
 
 # Get LMStudio Base URL from LM Studio Developer Console
 # Make sure to enable CORS
README.md
CHANGED
@@ -39,7 +39,8 @@ https://thinktank.ottomator.ai
 - ⬜ Azure Open AI API Integration
 - ⬜ Perplexity Integration
 - ⬜ Vertex AI Integration
-- ⬜ Cohere Integration
+- ✅ Cohere Integration (@hasanraiyan)
+- ✅ Dynamic model max token length (@hasanraiyan)
 - ⬜ Deploy directly to Vercel/Netlify/other similar platforms
 - ⬜ Prompt caching
 - ⬜ Better prompt enhancing
app/lib/.server/llm/api-key.ts
CHANGED
@@ -35,6 +35,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
     case "xAI":
       return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
+    case "Cohere":
+      return env.COHERE_API_KEY;
     default:
       return "";
   }
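One detail worth flagging in review: unlike the neighbouring cases, the new `Cohere` branch returns `env.COHERE_API_KEY` with no `|| cloudflareEnv.COHERE_API_KEY` fallback, so a key supplied only through the Cloudflare binding would not be picked up. A minimal sketch of how the branch is exercised, assuming the repo's `~` import alias and global `Env` type:

```ts
// Sketch only, not part of the PR.
import { getAPIKey } from '~/lib/.server/llm/api-key';

// cloudflareEnv is the Workers binding object; userApiKeys is an optional map
// of user-supplied keys that the chat route forwards.
export function resolveCohereKey(cloudflareEnv: Env, userApiKeys?: Record<string, string>) {
  // With this hunk, 'Cohere' resolves via env.COHERE_API_KEY only;
  // there is no cloudflareEnv.COHERE_API_KEY fallback.
  return getAPIKey(cloudflareEnv, 'Cohere', userApiKeys);
}
```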
app/lib/.server/llm/model.ts
CHANGED
@@ -7,6 +7,7 @@ import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
 import { createMistral } from '@ai-sdk/mistral';
+import { createCohere } from '@ai-sdk/cohere'
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -23,6 +24,15 @@ export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string)
 
   return openai(model);
 }
+
+export function getCohereAIModel(apiKey:string, model: string){
+  const cohere = createCohere({
+    apiKey,
+  });
+
+  return cohere(model);
+}
+
 export function getOpenAIModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     apiKey,
@@ -108,6 +118,8 @@ export function getXAIModel(apiKey: string, model: string) {
 
   return openai(model);
 }
+
+
 export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
   const apiKey = getAPIKey(env, provider, apiKeys);
   const baseURL = getBaseURL(env, provider);
@@ -135,6 +147,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
       return getLMStudioModel(baseURL, model);
     case 'xAI':
       return getXAIModel(apiKey, model);
+    case 'Cohere':
+      return getCohereAIModel(apiKey, model);
     default:
       return getOllamaModel(baseURL, model);
   }
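`getCohereAIModel` follows the same factory pattern as the other providers: `createCohere` from `@ai-sdk/cohere` builds a provider instance from the API key, and calling that instance with a model id yields a language model the Vercel AI SDK can use. A self-contained sketch of the pattern (model id and prompt are illustrative):

```ts
import { createCohere } from '@ai-sdk/cohere';
import { generateText } from 'ai';

async function demo() {
  // Mirrors getCohereAIModel: a provider instance built from an API key,
  // then a concrete model selected by id.
  const cohere = createCohere({ apiKey: process.env.COHERE_API_KEY ?? '' });
  const model = cohere('command-r');

  // The returned model plugs into any AI SDK call site (generateText,
  // streamText, and so on).
  const { text } = await generateText({
    model,
    prompt: 'Reply with a one-line greeting.',
  });
  console.log(text);
}

demo().catch(console.error);
```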
app/lib/.server/llm/stream-text.ts
CHANGED
@@ -41,10 +41,9 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
 
   return { model, provider, content: cleanedContent };
 }
-
 export function streamText(
-  messages: Messages,
-  env: Env,
+  messages: Messages,
+  env: Env,
   options?: StreamingOptions,
   apiKeys?: Record<string, string>
 ) {
@@ -64,13 +63,22 @@ export function streamText(
     return { ...message, content };
   }
 
-  return message;
+  return message;
 });
 
+  const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
+
+
+
+  const dynamicMaxTokens =
+    modelDetails && modelDetails.maxTokenAllowed
+      ? modelDetails.maxTokenAllowed
+      : MAX_TOKENS;
+
   return _streamText({
     model: getModel(currentProvider, currentModel, env, apiKeys),
     system: getSystemPrompt(),
-    maxTokens: MAX_TOKENS,
+    maxTokens: dynamicMaxTokens,
     messages: convertToCoreMessages(processedMessages),
     ...options,
   });
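The new `dynamicMaxTokens` is a plain lookup: find the selected model in `MODEL_LIST`, take its `maxTokenAllowed`, and fall back to the existing `MAX_TOKENS` constant when the model is unknown or has no cap. The same logic isolated as a small helper (names here are illustrative, not the PR's code):

```ts
interface ModelInfo {
  name: string;
  label: string;
  provider: string;
  maxTokenAllowed: number;
}

const DEFAULT_MAX_TOKENS = 8000; // stand-in for the repo's MAX_TOKENS constant

// Resolve the completion-token cap for the selected model.
function resolveMaxTokens(modelName: string, modelList: ModelInfo[]): number {
  const details = modelList.find((m) => m.name === modelName);

  // The PR uses a truthiness check (details && details.maxTokenAllowed),
  // so a cap of 0 would also fall back to the default.
  return details && details.maxTokenAllowed ? details.maxTokenAllowed : DEFAULT_MAX_TOKENS;
}

// Example:
const models: ModelInfo[] = [
  { name: 'command-r', label: 'Command R', provider: 'Cohere', maxTokenAllowed: 4096 },
];
console.log(resolveMaxTokens('command-r', models));     // 4096
console.log(resolveMaxTokens('unknown-model', models)); // 8000
```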
app/utils/constants.ts
CHANGED
@@ -12,12 +12,12 @@ const PROVIDER_LIST: ProviderInfo[] = [
   {
     name: 'Anthropic',
     staticModels: [
-      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
-      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
-      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
-      { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
-      { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
-      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' }
+      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: "https://console.anthropic.com/settings/keys",
   },
@@ -33,23 +33,40 @@ const PROVIDER_LIST: ProviderInfo[] = [
     staticModels: [],
     getDynamicModels: getOpenAILikeModels
   },
+  {
+    name: 'Cohere',
+    staticModels: [
+      { name: 'command-r-plus-08-2024', label: 'Command R plus Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-r-08-2024', label: 'Command R Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-r-plus', label: 'Command R plus', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-r', label: 'Command R', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command', label: 'Command', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-nightly', label: 'Command Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-light', label: 'Command Light', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'command-light-nightly', label: 'Command Light Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
+      { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
+    ],
+    getApiKeyLink: 'https://dashboard.cohere.com/api-keys'
+  },
   {
     name: 'OpenRouter',
     staticModels: [
-      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
+      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
       {
         name: 'anthropic/claude-3.5-sonnet',
         label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
         provider: 'OpenRouter'
+        , maxTokenAllowed: 8000
       },
-      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
-      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' }
+      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 }
     ],
     getDynamicModels: getOpenRouterModels,
     getApiKeyLink: 'https://openrouter.ai/settings/keys',
@@ -57,70 +74,70 @@ const PROVIDER_LIST: ProviderInfo[] = [
   }, {
     name: 'Google',
     staticModels: [
-      { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
-      { name: 'gemini-1.5-flash-002', label: 'Gemini 1.5 Flash-002', provider: 'Google' },
-      { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google' },
-      { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
-      { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google' },
-      { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google' }
+      { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
+      { name: 'gemini-1.5-flash-002', label: 'Gemini 1.5 Flash-002', provider: 'Google', maxTokenAllowed: 8192 },
+      { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
+      { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
+      { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
+      { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 }
     ],
     getApiKeyLink: 'https://aistudio.google.com/app/apikey'
   }, {
     name: 'Groq',
     staticModels: [
-      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
-      { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
-      { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
-      { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
-      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }
+      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: 'https://console.groq.com/keys'
   },
   {
     name: 'HuggingFace',
     staticModels: [
-      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace' },
-      { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace' },
-      { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace' },
-      { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace' }
+      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
+      { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
+      { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
+      { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: 'https://huggingface.co/settings/tokens'
   },
-
+
   {
     name: 'OpenAI',
     staticModels: [
-      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
-      { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
-      { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
-      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' }
+      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
+      { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
+      { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
+      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: "https://platform.openai.com/api-keys",
   }, {
     name: 'xAI',
     staticModels: [
-      { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI' }
+      { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
   }, {
     name: 'Deepseek',
     staticModels: [
-      { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
-      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' }
+      { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
+      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 }
    ],
     getApiKeyLink: 'https://platform.deepseek.com/api_keys'
   }, {
     name: 'Mistral',
     staticModels: [
-      { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
-      { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
-      { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
-      { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
-      { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
-      { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
-      { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
-      { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
-      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' }
+      { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
+      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 }
     ],
     getApiKeyLink: 'https://console.mistral.ai/api-keys/'
   }, {
@@ -164,7 +181,8 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
     return data.models.map((model: OllamaModel) => ({
       name: model.name,
       label: `${model.name} (${model.details.parameter_size})`,
-      provider: 'Ollama'
+      provider: 'Ollama',
+      maxTokenAllowed:8000,
     }));
   } catch (e) {
     return [];
@@ -217,8 +235,9 @@ async function getOpenRouterModels(): Promise<ModelInfo[]> {
     name: m.id,
     label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
       2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(
-      m.context_length / 1000)}k`,
-    provider: 'OpenRouter'
+      m.context_length / 1000)}k`,
+    provider: 'OpenRouter',
+    maxTokenAllowed:8000,
   }));
 }
 
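With `maxTokenAllowed` now required on `ModelInfo` (see `app/utils/types.ts` below), every static entry carries an explicit completion cap: 8000 for most providers, 8192 for the Google models, 4096 for the Cohere models, and a fixed 8000 for dynamically discovered Ollama and OpenRouter models. A hypothetical entry for some future provider would take the same shape (the 'Acme' names and the `~` import alias are illustrative assumptions):

```ts
import type { ModelInfo } from '~/utils/types';

// Every entry now has to state its cap explicitly; omitting maxTokenAllowed
// becomes a type error once the interface change below lands.
const acmeModels: ModelInfo[] = [
  { name: 'acme-large', label: 'Acme Large', provider: 'Acme', maxTokenAllowed: 8000 },
  { name: 'acme-mini', label: 'Acme Mini', provider: 'Acme', maxTokenAllowed: 4096 },
];

export default acmeModels;
```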
app/utils/types.ts
CHANGED
@@ -25,6 +25,7 @@ export interface ModelInfo {
   name: string;
   label: string;
   provider: string;
+  maxTokenAllowed: number;
 }
 
 export interface ProviderInfo {
package.json
CHANGED
@@ -27,6 +27,7 @@
   },
   "dependencies": {
     "@ai-sdk/anthropic": "^0.0.39",
+    "@ai-sdk/cohere": "^1.0.1",
     "@ai-sdk/google": "^0.0.52",
     "@ai-sdk/mistral": "^0.0.43",
     "@ai-sdk/openai": "^0.0.66",
pnpm-lock.yaml
CHANGED
@@ -14,6 +14,9 @@ importers:
       '@ai-sdk/anthropic':
         specifier: ^0.0.39
         version: 0.0.39(zod@3.23.8)
+      '@ai-sdk/cohere':
+        specifier: ^1.0.1
+        version: 1.0.1(zod@3.23.8)
       '@ai-sdk/google':
         specifier: ^0.0.52
         version: 0.0.52(zod@3.23.8)
@@ -279,6 +282,12 @@ packages:
     peerDependencies:
       zod: ^3.0.0
 
+  '@ai-sdk/cohere@1.0.1':
+    resolution: {integrity: sha512-xLaSYl/hs9EqfpvT9PvqZrDWjJPQPZBd0iT32T6812vN6kwuEQ6sSgQvqHWczIqxeej2GNRgMQwDL6Lh0L5pZw==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@ai-sdk/google@0.0.52':
     resolution: {integrity: sha512-bfsA/1Ae0SQ6NfLwWKs5SU4MBwlzJjVhK6bTVBicYFjUxg9liK/W76P1Tq/qK9OlrODACz3i1STOIWsFPpIOuQ==}
     engines: {node: '>=18'}
@@ -324,6 +333,15 @@ packages:
         zod:
           optional: true
 
+  '@ai-sdk/provider-utils@2.0.1':
+    resolution: {integrity: sha512-TNg7rPhRtETB2Z9F0JpOvpGii9Fs8EWM8nYy1jEkvSXkrPJ6b/9zVnDdaJsmLFDyrMbOsPJlkblYtmYEQou36w==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      zod:
+        optional: true
+
   '@ai-sdk/[email protected]':
     resolution: {integrity: sha512-oOwPQD8i2Ynpn22cur4sk26FW3mSy6t6/X/K1Ay2yGBKYiSpRyLfObhOrZEGsXDx+3euKy4nEZ193R36NM+tpQ==}
     engines: {node: '>=18'}
@@ -336,6 +354,10 @@ packages:
     resolution: {integrity: sha512-XMsNGJdGO+L0cxhhegtqZ8+T6nn4EoShS819OvCgI2kLbYTIvk0GWFGD0AXJmxkxs3DrpsJxKAFukFR7bvTkgQ==}
     engines: {node: '>=18'}
 
+  '@ai-sdk/provider@1.0.0':
+    resolution: {integrity: sha512-Sj29AzooJ7SYvhPd+AAWt/E7j63E9+AzRnoMHUaJPRYzOd/WDrVNxxv85prF9gDcQ7XPVlSk9j6oAZV9/DXYpA==}
+    engines: {node: '>=18'}
+
   '@ai-sdk/[email protected]':
     resolution: {integrity: sha512-1asDpxgmeHWL0/EZPCLENxfOHT+0jce0z/zasRhascodm2S6f6/KZn5doLG9jdmarcb+GjMjFmmwyOVXz3W1xg==}
     engines: {node: '>=18'}
@@ -3033,6 +3055,10 @@ packages:
     resolution: {integrity: sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==}
     engines: {node: '>=14.18'}
 
+  eventsource-parser@3.0.0:
+    resolution: {integrity: sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==}
+    engines: {node: '>=18.0.0'}
+
   [email protected]:
     resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==}
 
@@ -5687,6 +5713,12 @@ snapshots:
       '@ai-sdk/provider-utils': 1.0.9(zod@3.23.8)
       zod: 3.23.8
 
+  '@ai-sdk/cohere@1.0.1(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.0
+      '@ai-sdk/provider-utils': 2.0.1(zod@3.23.8)
+      zod: 3.23.8
+
   '@ai-sdk/google@0.0.52(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider': 0.0.24
@@ -5733,6 +5765,15 @@ snapshots:
     optionalDependencies:
       zod: 3.23.8
 
+  '@ai-sdk/provider-utils@2.0.1(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.0
+      eventsource-parser: 3.0.0
+      nanoid: 3.3.7
+      secure-json-parse: 2.7.0
+    optionalDependencies:
+      zod: 3.23.8
+
   '@ai-sdk/[email protected]':
     dependencies:
       json-schema: 0.4.0
@@ -5745,6 +5786,10 @@ snapshots:
     dependencies:
       json-schema: 0.4.0
 
+  '@ai-sdk/provider@1.0.0':
+    dependencies:
+      json-schema: 0.4.0
+
   '@ai-sdk/[email protected]([email protected])([email protected])':
     dependencies:
       '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
@@ -8751,6 +8796,8 @@ snapshots:
 
   [email protected]: {}
 
+  eventsource-parser@3.0.0: {}
+
   [email protected]:
     dependencies:
       md5.js: 1.3.5