Merge pull request #51 from zenith110/main
Adding Deepseek coder + Deepseek chat with the Deepseek API.
- app/lib/.server/llm/api-key.ts +2 -0
- app/lib/.server/llm/model.ts +10 -0
- app/utils/constants.ts +2 -0
- worker-configuration.d.ts +1 -0
app/lib/.server/llm/api-key.ts
CHANGED
@@ -19,6 +19,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
|
|
19 |
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
|
20 |
case 'OpenRouter':
|
21 |
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
|
|
|
|
|
22 |
case 'Mistral':
|
23 |
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
|
24 |
default:
|
|
|
19 |
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
|
20 |
case 'OpenRouter':
|
21 |
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
|
22 |
+
case 'Deepseek':
|
23 |
+
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
|
24 |
case 'Mistral':
|
25 |
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
|
26 |
default:
|
app/lib/.server/llm/model.ts
CHANGED
@@ -50,6 +50,14 @@ export function getGroqModel(apiKey: string, model: string) {
|
|
50 |
return openai(model);
|
51 |
}
|
52 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
53 |
export function getOllamaModel(model: string) {
|
54 |
return ollama(model);
|
55 |
}
|
@@ -77,6 +85,8 @@ export function getModel(provider: string, model: string, env: Env) {
|
|
77 |
return getOpenRouterModel(apiKey, model);
|
78 |
case 'Google':
|
79 |
return getGoogleModel(apiKey, model)
|
|
|
|
|
80 |
case 'Mistral':
|
81 |
return getMistralModel(apiKey, model);
|
82 |
default:
|
|
|
50 |
return openai(model);
|
51 |
}
|
52 |
|
53 |
+
export function getDeepseekModel(apiKey: string, model: string) {
|
54 |
+
const openai = createOpenAI({
|
55 |
+
baseURL: 'https://api.deepseek.com/beta',
|
56 |
+
apiKey,
|
57 |
+
});
|
58 |
+
|
59 |
+
return openai(model);
|
60 |
+
}
|
61 |
export function getOllamaModel(model: string) {
|
62 |
return ollama(model);
|
63 |
}
|
|
|
85 |
return getOpenRouterModel(apiKey, model);
|
86 |
case 'Google':
|
87 |
return getGoogleModel(apiKey, model)
|
88 |
+
case 'Deepseek':
|
89 |
+
return getDeepseekModel(apiKey, model);
|
90 |
case 'Mistral':
|
91 |
return getMistralModel(apiKey, model);
|
92 |
default:
|
app/utils/constants.ts
CHANGED
@@ -32,6 +32,8 @@ const staticModels: ModelInfo[] = [
|
|
32 |
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
|
33 |
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
|
34 |
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
|
|
|
|
|
35 |
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
|
36 |
{ name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
|
37 |
{ name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
|
|
|
32 |
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
|
33 |
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
|
34 |
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
|
35 |
+
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
|
36 |
+
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' },
|
37 |
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
|
38 |
{ name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
|
39 |
{ name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
|
worker-configuration.d.ts
CHANGED
@@ -4,4 +4,5 @@ interface Env {
|
|
4 |
GROQ_API_KEY: string;
|
5 |
OPEN_ROUTER_API_KEY: string;
|
6 |
OLLAMA_API_BASE_URL: string;
|
|
|
7 |
}
|
|
|
4 |
GROQ_API_KEY: string;
|
5 |
OPEN_ROUTER_API_KEY: string;
|
6 |
OLLAMA_API_BASE_URL: string;
|
7 |
+
DEEPSEEK_API_KEY: string;
|
8 |
}
|