Merge pull request #2 from jonathands/main
Added Google Generative AI (Gemini) integration
- .env.example +7 -2
- app/lib/.server/llm/api-key.ts +3 -0
- app/lib/.server/llm/model.ts +13 -1
- app/utils/constants.ts +7 -5
.env.example

@@ -18,7 +18,12 @@ ANTHROPIC_API_KEY=
 # Get your OpenRouter API Key in your account settings -
 # https://openrouter.ai/settings/keys
 # You only need this environment variable set if you want to use OpenRouter models
-OPEN_ROUTER_API_KEY=
+OPEN_ROUTER_API_KEY=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
+
+# Get your Google Generative AI API Key by following these instructions -
+# https://console.cloud.google.com/apis/credentials
+# You only need this environment variable set if you want to use Google Generative AI models
+GOOGLE_GENERATIVE_AI_API_KEY=
 
 # Include this environment variable if you want more logging for debugging locally
-VITE_LOG_LEVEL=debug
+VITE_LOG_LEVEL=debug
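The new key is read in two places downstream: the local dev environment and the Cloudflare `Env` binding, so the binding type needs a matching field. A minimal sketch of that typing, assuming the project declares its bindings on an `Env` interface (the surrounding fields and their optionality are assumptions, not part of this diff):

```ts
// Sketch only: the Cloudflare binding type must expose the new variable so
// that `cloudflareEnv.GOOGLE_GENERATIVE_AI_API_KEY` type-checks in api-key.ts.
interface Env {
  ANTHROPIC_API_KEY?: string;
  OPENAI_API_KEY?: string;
  GROQ_API_KEY?: string;
  OPEN_ROUTER_API_KEY?: string;
  GOOGLE_GENERATIVE_AI_API_KEY?: string; // added alongside this .env entry
}
```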
app/lib/.server/llm/api-key.ts

@@ -7,11 +7,14 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
  * The `cloudflareEnv` is only used when deployed or when previewing locally.
  * In development the environment variables are available through `env`.
  */
+
  switch (provider) {
    case 'Anthropic':
      return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
    case 'OpenAI':
      return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY;
+   case 'Google':
+     return env.GOOGLE_GENERATIVE_AI_API_KEY || cloudflareEnv.GOOGLE_GENERATIVE_AI_API_KEY;
    case 'Groq':
      return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
    case 'OpenRouter':
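The `env.X || cloudflareEnv.X` pattern means a value from the local environment wins over the deployed Cloudflare binding, and the new `'Google'` case follows the same fallback. A standalone sketch of that resolution order, assuming `env` here is Node's process environment as the file's comment suggests (the `resolveGoogleKey` helper name is hypothetical, for illustration only):

```ts
import { env } from 'node:process';

// Hypothetical helper mirroring the 'Google' branch added to getAPIKey():
// the development value takes precedence over the Cloudflare binding.
function resolveGoogleKey(cloudflareEnv: { GOOGLE_GENERATIVE_AI_API_KEY?: string }) {
  return env.GOOGLE_GENERATIVE_AI_API_KEY || cloudflareEnv.GOOGLE_GENERATIVE_AI_API_KEY;
}
```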
app/lib/.server/llm/model.ts

@@ -3,6 +3,7 @@
 import { getAPIKey } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
+import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
 
@@ -22,6 +23,14 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
+export function getGoogleModel(apiKey: string, model: string) {
+  const google = createGoogleGenerativeAI(
+    apiKey,
+  );
+
+  return google(model);
+}
+
 export function getGroqModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     baseURL: 'https://api.groq.com/openai/v1',
@@ -45,7 +54,8 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 
 export function getModel(provider: string, model: string, env: Env) {
   const apiKey = getAPIKey(env, provider);
-
+
+
   switch (provider) {
     case 'Anthropic':
       return getAnthropicModel(apiKey, model);
@@ -55,6 +65,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getGroqModel(apiKey, model);
     case 'OpenRouter':
       return getOpenRouterModel(apiKey, model);
+    case 'Google':
+      return getGoogleModel(apiKey, model)
     default:
       return getOllamaModel(model);
   }
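One thing worth noting about `getGoogleModel`: as far as I can tell, `createGoogleGenerativeAI` in `@ai-sdk/google` takes a provider settings object rather than a positional string, so passing `apiKey` directly most likely leaves the key unset (the provider would then fall back to reading `GOOGLE_GENERATIVE_AI_API_KEY` from the process environment, which happens to work in local dev but not with the Cloudflare binding). A minimal sketch of the settings-object form, under that reading of the SDK:

```ts
import { createGoogleGenerativeAI } from '@ai-sdk/google';

// Sketch: pass the key via the provider settings object instead of a bare
// string, so the value resolved by getAPIKey() is actually used.
export function getGoogleModel(apiKey: string, model: string) {
  const google = createGoogleGenerativeAI({ apiKey });

  return google(model); // e.g. google('gemini-1.5-pro-latest')
}
```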
app/utils/constants.ts

@@ -6,7 +6,13 @@ export const DEFAULT_MODEL = "claude-3-5-sonnet-20240620";
 export const DEFAULT_PROVIDER = "Anthropic";
 export const MODEL_LIST = [
   { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
+  { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
+  { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google'},
   { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
+  { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
+  { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
+  { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
+  { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
   { name: 'qwen2.5-coder:7b', label: 'Qwen 2.5 Coder 7b', provider: 'Ollama' },
   { name: 'qwen2.5-coder:1.5b', label: 'Qwen 2.5 Coder 1.5b', provider: 'Ollama' },
   { name: 'deepseek-coder-v2:236b', label: 'DeepSeek-Coder-V2 236b', provider: 'Ollama' },
@@ -31,8 +37,4 @@ export const MODEL_LIST = [
   { name: 'claude-3-opus-20240229', label: 'Claude 3 Opus', provider: 'Anthropic' },
   { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
   { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' },
-
-  { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
-  { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
-  { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
-];
+];
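The new `MODEL_LIST` entries are what surface Gemini in the model picker; each entry's `provider` string has to match the case labels added in `getAPIKey()` and `getModel()` ('Google' here). A small usage sketch tying the pieces together (the `streamText` call and prompt are illustrative assumptions based on how the other providers are consumed, not code from this PR):

```ts
import { streamText } from 'ai';
import { getModel } from '~/lib/.server/llm/model';

// Illustrative only: the provider/name pair must match a MODEL_LIST entry
// plus the 'Google' cases added in getAPIKey() and getModel().
export function demoGemini(env: Env) {
  const model = getModel('Google', 'gemini-1.5-flash-latest', env);

  return streamText({
    model,
    prompt: 'Say hello from Gemini.',
  });
}
```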