fix: docker prod env variable fix (#1170)
* fix: docker prod env variable fix
* lint and typecheck
* removed hardcoded tag
- app/lib/modules/llm/providers/lmstudio.ts +5 -4
- app/lib/modules/llm/providers/ollama.ts +3 -3
- app/routes/api.models.ts +9 -3
- bindings.sh +26 -9
- docker-compose.yaml +18 -0
- worker-configuration.d.ts +2 -1
app/lib/modules/llm/providers/lmstudio.ts
CHANGED
@@ -40,7 +40,7 @@ export default class LMStudioProvider extends BaseProvider {
      * Running in Server
      * Backend: Check if we're running in Docker
      */
-    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+    const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;

@@ -58,7 +58,7 @@ export default class LMStudioProvider extends BaseProvider {
   }
   getModelInstance: (options: {
     model: string;
-    serverEnv: Env;
+    serverEnv?: Env;
     apiKeys?: Record<string, string>;
     providerSettings?: Record<string, IProviderSetting>;
   }) => LanguageModelV1 = (options) => {

@@ -75,8 +75,9 @@ export default class LMStudioProvider extends BaseProvider {
       throw new Error('No baseUrl found for LMStudio provider');
     }

+    const isDocker = process.env.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
+
     if (typeof window === 'undefined') {
-      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
       baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
       baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }

@@ -84,7 +85,7 @@ export default class LMStudioProvider extends BaseProvider {
     logger.debug('LMStudio Base Url used: ', baseUrl);

     const lmstudio = createOpenAI({
-      baseUrl: `${baseUrl}/v1`,
+      baseURL: `${baseUrl}/v1`,
       apiKey: '',
     });

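Net effect of the four hunks: Docker detection no longer relies on `process.env` alone, which can be empty at runtime (for example behind Wrangler bindings), but also consults the `serverEnv` handed to the provider; `serverEnv` becomes optional in the `getModelInstance` options; and the `createOpenAI` call now passes the correctly cased `baseURL`. A minimal sketch of the resulting rewrite, with `resolveBaseUrl` as an illustrative helper rather than actual provider code:

// Sketch only: the provider inlines this logic; resolveBaseUrl is hypothetical.
function resolveBaseUrl(baseUrl: string, serverEnv?: Record<string, string>): string {
  // Docker may be signalled by the Node process env or by the serverEnv
  // bindings forwarded from the request context.
  const isDocker =
    process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

  if (!isDocker) {
    return baseUrl;
  }

  // Inside a container, localhost points at the container itself;
  // host.docker.internal reaches the host where LM Studio (or Ollama) listens.
  return baseUrl
    .replace('localhost', 'host.docker.internal')
    .replace('127.0.0.1', 'host.docker.internal');
}

// resolveBaseUrl('http://127.0.0.1:1234', { RUNNING_IN_DOCKER: 'true' })
//   => 'http://host.docker.internal:1234'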
app/lib/modules/llm/providers/ollama.ts
CHANGED
@@ -63,7 +63,7 @@ export default class OllamaProvider extends BaseProvider {
|
|
63 |
* Running in Server
|
64 |
* Backend: Check if we're running in Docker
|
65 |
*/
|
66 |
-
const isDocker = process
|
67 |
|
68 |
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
|
69 |
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
|
@@ -83,7 +83,7 @@ export default class OllamaProvider extends BaseProvider {
|
|
83 |
}
|
84 |
getModelInstance: (options: {
|
85 |
model: string;
|
86 |
-
serverEnv
|
87 |
apiKeys?: Record<string, string>;
|
88 |
providerSettings?: Record<string, IProviderSetting>;
|
89 |
}) => LanguageModelV1 = (options) => {
|
@@ -101,7 +101,7 @@ export default class OllamaProvider extends BaseProvider {
|
|
101 |
throw new Error('No baseUrl found for OLLAMA provider');
|
102 |
}
|
103 |
|
104 |
-
const isDocker = process
|
105 |
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
|
106 |
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
|
107 |
|
|
|
63 |
* Running in Server
|
64 |
* Backend: Check if we're running in Docker
|
65 |
*/
|
66 |
+
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
|
67 |
|
68 |
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
|
69 |
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
|
|
|
83 |
}
|
84 |
getModelInstance: (options: {
|
85 |
model: string;
|
86 |
+
serverEnv?: Env;
|
87 |
apiKeys?: Record<string, string>;
|
88 |
providerSettings?: Record<string, IProviderSetting>;
|
89 |
}) => LanguageModelV1 = (options) => {
|
|
|
101 |
throw new Error('No baseUrl found for OLLAMA provider');
|
102 |
}
|
103 |
|
104 |
+
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
|
105 |
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
|
106 |
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
|
107 |
|
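The Ollama provider mirrors the LMStudio change: the same two-source `RUNNING_IN_DOCKER` check replaces the `process.env`-only check in both `getDynamicModels` and `getModelInstance`, and `serverEnv` becomes optional in the `getModelInstance` options type, so the sketch above applies here unchanged.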
app/routes/api.models.ts
CHANGED
@@ -41,11 +41,17 @@ function getProviderInfo(llmManager: LLMManager) {
 export async function loader({
   request,
   params,
+  context,
 }: {
   request: Request;
   params: { provider?: string };
+  context: {
+    cloudflare?: {
+      env: Record<string, string>;
+    };
+  };
 }): Promise<Response> {
-  const llmManager = LLMManager.getInstance(import.meta.env);
+  const llmManager = LLMManager.getInstance(context.cloudflare?.env);

   // Get client side maintained API keys and provider settings from cookies
   const cookieHeader = request.headers.get('Cookie');

@@ -63,7 +69,7 @@ export async function loader({
   if (provider) {
     const staticModels = provider.staticModels;
     const dynamicModels = provider.getDynamicModels
-      ? await provider.getDynamicModels(apiKeys, providerSettings, import.meta.env)
+      ? await provider.getDynamicModels(apiKeys, providerSettings, context.cloudflare?.env)
       : [];
     modelList = [...staticModels, ...dynamicModels];
   }

@@ -72,7 +78,7 @@ export async function loader({
   modelList = await llmManager.updateModelList({
     apiKeys,
     providerSettings,
-    serverEnv: import.meta.env,
+    serverEnv: context.cloudflare?.env,
   });
 }

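The loader changes exist because on Cloudflare Pages server-side variables do not live on `process.env` but on the request context at `context.cloudflare.env`; the route now accepts `context` and threads `context.cloudflare?.env` into `LLMManager.getInstance`, `getDynamicModels`, and `updateModelList`. The optional chaining keeps non-Cloudflare dev servers working, where `context.cloudflare` may be undefined.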
bindings.sh
CHANGED
@@ -2,15 +2,32 @@

 bindings=""

-while IFS= read -r line || [ -n "$line" ]; do
-  if [[ ! "$line" =~ ^# ]] && [[ -n "$line" ]]; then
-    name=$(echo "$line" | cut -d '=' -f 1)
-    value=$(echo "$line" | cut -d '=' -f 2-)
-    value=$(echo $value | sed 's/^"\(.*\)"$/\1/')
-    bindings+="--binding ${name}=${value} "
-  fi
-done < .env.local
+# Function to extract variable names from the TypeScript interface
+extract_env_vars() {
+  grep -o '[A-Z_]\+:' worker-configuration.d.ts | sed 's/://'
+}
+
+# First try to read from .env.local if it exists
+if [ -f ".env.local" ]; then
+  while IFS= read -r line || [ -n "$line" ]; do
+    if [[ ! "$line" =~ ^# ]] && [[ -n "$line" ]]; then
+      name=$(echo "$line" | cut -d '=' -f 1)
+      value=$(echo "$line" | cut -d '=' -f 2-)
+      value=$(echo $value | sed 's/^"\(.*\)"$/\1/')
+      bindings+="--binding ${name}=${value} "
+    fi
+  done < .env.local
+else
+  # If .env.local doesn't exist, use environment variables defined in .d.ts
+  env_vars=($(extract_env_vars))
+  # Generate bindings for each environment variable if it exists
+  for var in "${env_vars[@]}"; do
+    if [ -n "${!var}" ]; then
+      bindings+="--binding ${var}=${!var} "
+    fi
+  done
+fi

 bindings=$(echo $bindings | sed 's/[[:space:]]*$//')

-echo $bindings
+echo $bindings
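Behavior sketch (placeholder values): with a `.env.local` line `OPENAI_API_KEY=sk-test`, the script emits `--binding OPENAI_API_KEY=sk-test`; without `.env.local`, it greps `worker-configuration.d.ts` for `NAME:` patterns and emits a `--binding` flag for each matching variable that is set in the calling shell. Note that `${!var}` is Bash indirect expansion, so the script must run under `bash` rather than POSIX `sh`, and unquoted values containing spaces would break the generated flags.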
docker-compose.yaml
CHANGED
@@ -72,3 +72,21 @@ services:
       - "5173:5173"
     command: pnpm run dev --host 0.0.0.0
     profiles: ["development", "default"]
+
+  app-prebuild:
+    image: ghcr.io/stackblitz-labs/bolt.diy:latest
+    ports:
+      - "5173:5173"
+    environment:
+      - NODE_ENV=production
+      - COMPOSE_PROFILES=production
+      # Not strictly needed but serving as hints for Coolify
+      - PORT=5173
+      - OLLAMA_API_BASE_URL=http://127.0.0.1:11434
+      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    command: pnpm run dockerstart
+    profiles:
+      - prebuilt
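The new `app-prebuild` service is gated behind the `prebuilt` profile, so it only starts when requested explicitly, e.g. with `docker compose --profile prebuilt up`; the existing `development`/`default` profiles are untouched. `RUNNING_IN_DOCKER=true` is the signal the provider changes above key off, and the `extra_hosts` entry maps `host.docker.internal` to the host gateway so the rewritten Ollama/LM Studio URLs also resolve on Linux, where that hostname is not provided automatically.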
worker-configuration.d.ts
CHANGED
@@ -1,5 +1,6 @@
 interface Env {
-  DEFAULT_NUM_CTX: Settings;
+  RUNNING_IN_DOCKER: Settings;
+  DEFAULT_NUM_CTX: Settings;
   ANTHROPIC_API_KEY: string;
   OPENAI_API_KEY: string;
   GROQ_API_KEY: string;