Huggingface Models Integrated
- .env.example +7 -1
- .husky/commit-msg +0 -7
- CONTRIBUTING.md +1 -0
- Dockerfile +4 -0
- app/lib/.server/llm/api-key.ts +2 -0
- app/lib/.server/llm/model.ts +11 -0
- app/utils/constants.ts +13 -1
- docker-compose.yaml +2 -0
- worker-configuration.d.ts +1 -0
- yarn.lock +0 -0
.env.example CHANGED
@@ -5,6 +5,12 @@
 # You only need this environment variable set if you want to use Groq models
 GROQ_API_KEY=
 
+# Get your HuggingFace API Key here -
+# https://huggingface.co/settings/tokens
+# You only need this environment variable set if you want to use HuggingFace models
+HuggingFace_API_KEY=
+
+
 # Get your Open AI API Key by following these instructions -
 # https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
 # You only need this environment variable set if you want to use GPT models
@@ -55,4 +61,4 @@ LMSTUDIO_API_BASE_URL=
 XAI_API_KEY=
 
 # Include this environment variable if you want more logging for debugging locally
-VITE_LOG_LEVEL=debug
+VITE_LOG_LEVEL=debug

.husky/commit-msg DELETED
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-. "$(dirname "$0")/_/husky.sh"
-
-npx commitlint --edit $1
-
-exit 0

CONTRIBUTING.md CHANGED
@@ -72,6 +72,7 @@ pnpm install
 - Add your LLM API keys (only set the ones you plan to use):
 ```bash
 GROQ_API_KEY=XXX
+HuggingFace_API_KEY=XXX
 OPENAI_API_KEY=XXX
 ANTHROPIC_API_KEY=XXX
 ...

Dockerfile CHANGED
@@ -19,6 +19,7 @@ FROM base AS bolt-ai-production
 
 # Define environment variables with default values or let them be overridden
 ARG GROQ_API_KEY
+ARG HuggingFace_API_KEY
 ARG OPENAI_API_KEY
 ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
@@ -28,6 +29,7 @@ ARG VITE_LOG_LEVEL=debug
 
 ENV WRANGLER_SEND_METRICS=false \
     GROQ_API_KEY=${GROQ_API_KEY} \
+    HuggingFace_API_KEY=${HuggingFace_API_KEY} \
     OPENAI_API_KEY=${OPENAI_API_KEY} \
     ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
@@ -48,6 +50,7 @@ FROM base AS bolt-ai-development
 
 # Define the same environment variables for development
 ARG GROQ_API_KEY
+ARG HuggingFace_API_KEY
 ARG OPENAI_API_KEY
 ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
@@ -56,6 +59,7 @@ ARG OLLAMA_API_BASE_URL
 ARG VITE_LOG_LEVEL=debug
 
 ENV GROQ_API_KEY=${GROQ_API_KEY} \
+    HuggingFace_API_KEY=${HuggingFace_API_KEY} \
    OPENAI_API_KEY=${OPENAI_API_KEY} \
     ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} \
     OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \

app/lib/.server/llm/api-key.ts CHANGED
@@ -23,6 +23,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
       return env.GOOGLE_GENERATIVE_AI_API_KEY || cloudflareEnv.GOOGLE_GENERATIVE_AI_API_KEY;
     case 'Groq':
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
+    case 'HuggingFace':
+      return env.HuggingFace_API_KEY || cloudflareEnv.HuggingFace_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
     case 'Deepseek':

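The added case follows the same lookup order as the other providers: the server-side `env` value (e.g. the `.env.local` entry added above) wins, with the Cloudflare binding declared in `worker-configuration.d.ts` as the fallback. A minimal standalone sketch of that resolution, assuming Node-style `process.env` access; the function name is illustrative only:

```ts
// Illustrative sketch of the fallback chain used in getAPIKey; not part of the diff.
// `cloudflareEnv` stands for the worker bindings typed by worker-configuration.d.ts.
function resolveHuggingFaceKey(cloudflareEnv: { HuggingFace_API_KEY?: string }): string | undefined {
  // Local environment value first, then the deployed Cloudflare binding.
  return process.env.HuggingFace_API_KEY || cloudflareEnv.HuggingFace_API_KEY;
}
```
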
app/lib/.server/llm/model.ts CHANGED
@@ -56,6 +56,15 @@ export function getGroqModel(apiKey: string, model: string) {
   return openai(model);
 }
 
+export function getHuggingFaceModel(apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL: 'https://api-inference.huggingface.co/v1/',
+    apiKey,
+  });
+
+  return openai(model);
+}
+
 export function getOllamaModel(baseURL: string, model: string) {
   let Ollama = ollama(model, {
     numCtx: 32768,
@@ -110,6 +119,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
       return getOpenAIModel(apiKey, model);
     case 'Groq':
       return getGroqModel(apiKey, model);
+    case 'HuggingFace':
+      return getHuggingFaceModel(apiKey, model);
     case 'OpenRouter':
       return getOpenRouterModel(apiKey, model);
     case 'Google':

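`getHuggingFaceModel` reuses the AI SDK's OpenAI provider and only swaps in Hugging Face's OpenAI-compatible Inference endpoint, so the returned model drops into the same code path as the other providers. A minimal standalone usage sketch, not code from this repo; it assumes the same `ai` / `@ai-sdk/openai` packages the project already imports and a key exported as `HuggingFace_API_KEY`:

```ts
import { createOpenAI } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Same construction as getHuggingFaceModel above, inlined for a standalone example.
const huggingface = createOpenAI({
  baseURL: 'https://api-inference.huggingface.co/v1/',
  apiKey: process.env.HuggingFace_API_KEY,
});

// Any of the model IDs registered in app/utils/constants.ts should work here.
const { text } = await generateText({
  model: huggingface('Qwen/Qwen2.5-Coder-32B-Instruct'),
  prompt: 'Write a TypeScript function that deduplicates an array.',
});

console.log(text);
```

Because the endpoint speaks the OpenAI chat-completions protocol, no Hugging Face-specific SDK is needed; only the `baseURL` and the key differ from the stock OpenAI provider.
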
app/utils/constants.ts CHANGED
@@ -71,7 +71,19 @@ const PROVIDER_LIST: ProviderInfo[] = [
     { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }
   ],
   getApiKeyLink: 'https://console.groq.com/keys'
-}, {
+},
+{
+  name: 'HuggingFace',
+  staticModels: [
+    { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace' },
+    { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace' },
+    { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace' },
+    { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace' }
+  ],
+  getApiKeyLink: 'https://huggingface.co/settings/tokens'
+},
+
+{
   name: 'OpenAI',
   staticModels: [
     { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },

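The new entry mirrors the shape of the existing providers: `name` must match the `case 'HuggingFace'` strings added in `api-key.ts` and `model.ts`, each `staticModels[].name` is a Hugging Face Hub repo ID forwarded verbatim as the model name, and `getApiKeyLink` points the UI at the token page. A sketch of the implied types, with field names inferred from this diff rather than copied from the repo's actual `ProviderInfo` definition:

```ts
// Inferred from the fields visible in this diff; the repo's real types may differ.
interface ModelInfo {
  name: string;     // Hugging Face Hub repo ID, passed through as the OpenAI-compatible model name
  label: string;    // text shown in the model picker
  provider: string; // must match the provider's `name`
}

interface ProviderInfo {
  name: string;
  staticModels: ModelInfo[];
  getApiKeyLink?: string;
}

const huggingFace: ProviderInfo = {
  name: 'HuggingFace',
  staticModels: [
    {
      name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
      label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
      provider: 'HuggingFace',
    },
  ],
  getApiKeyLink: 'https://huggingface.co/settings/tokens',
};
```
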
docker-compose.yaml CHANGED
@@ -14,6 +14,7 @@ services:
       # No strictly neded but serving as hints for Coolify
       - PORT=5173
       - GROQ_API_KEY=${GROQ_API_KEY}
+      - HuggingFace_API_KEY=${HuggingFace_API_KEY}
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
       - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
@@ -40,6 +41,7 @@ services:
       - WATCHPACK_POLLING=true
       - PORT=5173
       - GROQ_API_KEY=${GROQ_API_KEY}
+      - HuggingFace_API_KEY=${HuggingFace_API_KEY}
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
       - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}

worker-configuration.d.ts CHANGED
@@ -2,6 +2,7 @@ interface Env {
   ANTHROPIC_API_KEY: string;
   OPENAI_API_KEY: string;
   GROQ_API_KEY: string;
+  HuggingFace_API_KEY: string;
   OPEN_ROUTER_API_KEY: string;
   OLLAMA_API_BASE_URL: string;
   OPENAI_LIKE_API_KEY: string;

yarn.lock ADDED
The diff for this file is too large to render; see the raw diff.