Mirror of https://github.com/stackblitz-labs/bolt.diy
Synced 2025-03-09 21:50:36 +00:00
fix: docker prod env variable fix (#1170)
* fix: docker prod env variable fix
* lint and typecheck
* removed hardcoded tag
parent 5a0489f3c3
commit 660353360f

@@ -40,7 +40,7 @@ export default class LMStudioProvider extends BaseProvider {
      * Running in Server
      * Backend: Check if we're running in Docker
      */
-    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+    const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
@@ -58,7 +58,7 @@ export default class LMStudioProvider extends BaseProvider {
   }
   getModelInstance: (options: {
     model: string;
-    serverEnv: Env;
+    serverEnv?: Env;
     apiKeys?: Record<string, string>;
     providerSettings?: Record<string, IProviderSetting>;
   }) => LanguageModelV1 = (options) => {
@@ -75,8 +75,9 @@ export default class LMStudioProvider extends BaseProvider {
       throw new Error('No baseUrl found for LMStudio provider');
     }

+    const isDocker = process.env.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
+
     if (typeof window === 'undefined') {
-      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
       baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
       baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }
@@ -84,7 +85,7 @@ export default class LMStudioProvider extends BaseProvider {
     logger.debug('LMStudio Base Url used: ', baseUrl);

     const lmstudio = createOpenAI({
-      baseUrl: `${baseUrl}/v1`,
+      baseURL: `${baseUrl}/v1`,
       apiKey: '',
     });

@@ -63,7 +63,7 @@ export default class OllamaProvider extends BaseProvider {
      * Running in Server
      * Backend: Check if we're running in Docker
      */
-    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+    const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
@@ -83,7 +83,7 @@ export default class OllamaProvider extends BaseProvider {
   }
   getModelInstance: (options: {
     model: string;
-    serverEnv: Env;
+    serverEnv?: Env;
     apiKeys?: Record<string, string>;
     providerSettings?: Record<string, IProviderSetting>;
   }) => LanguageModelV1 = (options) => {
@@ -101,7 +101,7 @@ export default class OllamaProvider extends BaseProvider {
       throw new Error('No baseUrl found for OLLAMA provider');
     }

-    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+    const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;

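Both providers now follow the same pattern: treat the app as Dockerised when either the local process environment or the injected server environment carries RUNNING_IN_DOCKER=true, then point loopback base URLs at the Docker host gateway. A minimal TypeScript sketch of that pattern, assuming Node-style typings for process; dockerizeBaseUrl is an illustrative name, not a helper that exists in the repository:

// Sketch only: mirrors the logic of the hunks above, not the repo's exact code.
function dockerizeBaseUrl(baseUrl: string, serverEnv?: { RUNNING_IN_DOCKER?: string }): string {
  // Docker can be signalled via the local process env (Node dev server) or via the
  // server env bag handed to the provider (e.g. Cloudflare bindings).
  const isDocker =
    process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

  if (!isDocker) {
    return baseUrl;
  }

  // Inside a container, localhost/127.0.0.1 refer to the container itself,
  // so both spellings are rewritten to the Docker host gateway.
  return baseUrl
    .replace('localhost', 'host.docker.internal')
    .replace('127.0.0.1', 'host.docker.internal');
}

// e.g. dockerizeBaseUrl('http://127.0.0.1:11434', { RUNNING_IN_DOCKER: 'true' })
//   -> 'http://host.docker.internal:11434'
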
@@ -41,11 +41,17 @@ function getProviderInfo(llmManager: LLMManager) {
 export async function loader({
   request,
   params,
+  context,
 }: {
   request: Request;
   params: { provider?: string };
+  context: {
+    cloudflare?: {
+      env: Record<string, string>;
+    };
+  };
 }): Promise<Response> {
-  const llmManager = LLMManager.getInstance(import.meta.env);
+  const llmManager = LLMManager.getInstance(context.cloudflare?.env);

   // Get client side maintained API keys and provider settings from cookies
   const cookieHeader = request.headers.get('Cookie');
@@ -63,7 +69,7 @@ export async function loader({
   if (provider) {
     const staticModels = provider.staticModels;
     const dynamicModels = provider.getDynamicModels
-      ? await provider.getDynamicModels(apiKeys, providerSettings, import.meta.env)
+      ? await provider.getDynamicModels(apiKeys, providerSettings, context.cloudflare?.env)
       : [];
     modelList = [...staticModels, ...dynamicModels];
   }
@@ -72,7 +78,7 @@ export async function loader({
     modelList = await llmManager.updateModelList({
       apiKeys,
       providerSettings,
-      serverEnv: import.meta.env,
+      serverEnv: context.cloudflare?.env,
     });
   }

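import.meta.env is resolved by Vite at build time, so variables configured on the Cloudflare deployment never reach it; at request time they are exposed as bindings on context.cloudflare.env, which is what the loader now reads. A condensed, hypothetical loader showing just that plumbing; everything except the context shape is illustrative:

// Standalone sketch of where server env comes from on Cloudflare Pages.
type LoaderArgs = {
  request: Request;
  context: { cloudflare?: { env: Record<string, string> } };
};

export async function loader({ context }: LoaderArgs): Promise<Response> {
  // Deployment variables arrive as per-request bindings, not via import.meta.env.
  const serverEnv = context.cloudflare?.env;
  const runningInDocker = serverEnv?.RUNNING_IN_DOCKER === 'true';

  return new Response(JSON.stringify({ runningInDocker }), {
    headers: { 'Content-Type': 'application/json' },
  });
}
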
bindings.sh (21 changed lines)
@@ -2,14 +2,31 @@

 bindings=""

-while IFS= read -r line || [ -n "$line" ]; do
+# Function to extract variable names from the TypeScript interface
+extract_env_vars() {
+    grep -o '[A-Z_]\+:' worker-configuration.d.ts | sed 's/://'
+}
+
+# First try to read from .env.local if it exists
+if [ -f ".env.local" ]; then
+  while IFS= read -r line || [ -n "$line" ]; do
     if [[ ! "$line" =~ ^# ]] && [[ -n "$line" ]]; then
       name=$(echo "$line" | cut -d '=' -f 1)
       value=$(echo "$line" | cut -d '=' -f 2-)
       value=$(echo $value | sed 's/^"\(.*\)"$/\1/')
       bindings+="--binding ${name}=${value} "
     fi
-done < .env.local
+  done < .env.local
+else
+  # If .env.local doesn't exist, use environment variables defined in .d.ts
+  env_vars=($(extract_env_vars))
+  # Generate bindings for each environment variable if it exists
+  for var in "${env_vars[@]}"; do
+    if [ -n "${!var}" ]; then
+      bindings+="--binding ${var}=${!var} "
+    fi
+  done
+fi

 bindings=$(echo $bindings | sed 's/[[:space:]]*$//')

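Either way the script still emits one --binding NAME=value pair per variable, presumably handed on to wrangler pages dev. As an illustrative example, a shell with no .env.local but with GROQ_API_KEY and RUNNING_IN_DOCKER exported would produce something like --binding GROQ_API_KEY=gsk_xxx --binding RUNNING_IN_DOCKER=true (gsk_xxx is a placeholder, not a real key).
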
@@ -72,3 +72,21 @@ services:
       - "5173:5173"
     command: pnpm run dev --host 0.0.0.0
     profiles: ["development", "default"]
+
+  app-prebuild:
+    image: ghcr.io/stackblitz-labs/bolt.diy:latest
+    ports:
+      - "5173:5173"
+    environment:
+      - NODE_ENV=production
+      - COMPOSE_PROFILES=production
+      # No strictly needed but serving as hints for Coolify
+      - PORT=5173
+      - OLLAMA_API_BASE_URL=http://127.0.0.1:11434
+      - DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    command: pnpm run dockerstart
+    profiles:
+      - prebuilt

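Because the new service carries only the prebuilt profile, plain docker compose up keeps starting the locally built development service as before; the published image has to be selected explicitly, e.g. docker compose --profile prebuilt up (or by exporting COMPOSE_PROFILES=prebuilt).
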
worker-configuration.d.ts (vendored, 3 changed lines)
@@ -1,5 +1,6 @@
 interface Env {
-  DEFAULT_NUM_CTX:Settings;
+  RUNNING_IN_DOCKER: Settings;
+  DEFAULT_NUM_CTX: Settings;
   ANTHROPIC_API_KEY: string;
   OPENAI_API_KEY: string;
   GROQ_API_KEY: string;