-
Notifications
You must be signed in to change notification settings - Fork 334
Open
Description
Hi,
I am trying to get the self-hosted version running with Caddy as my reverse proxy, but my TLS/SSL endpoint sits in front of it.
I got it running far enough that I can access my dashboard, but when I try to register, the browser blocks the request because of mixed active content.
The error on the UI:

The error on the browser console:
![]()
The setup is as follows:
TLS/SSL Endpoint (Host of my Alpine VM) --> Alpine VM (Host of my Docker setup) --> Docker Container (Caddy, Dashboard, Worker, API, Clickhouse, DB, KV)
Thanks in advance. If you need any more information or anything is unclear, please reach out!
The content of my .env file
# OpenPanel self-hosted environment configuration.
SELF_HOSTED="true"
BATCH_SIZE="5000"
BATCH_INTERVAL="10000"
ALLOW_REGISTRATION="false"
ALLOW_INVITATION="true"

# Internal service endpoints (Docker network hostnames from docker-compose.yml).
REDIS_URL="redis://op-kv:6379"
CLICKHOUSE_URL="http://op-ch:8123/openpanel"
DATABASE_URL="postgresql://postgres:postgres@op-db:5432/postgres?schema=public"
DATABASE_URL_DIRECT="postgresql://postgres:postgres@op-db:5432/postgres?schema=public"

# NOTE(review): these are internal http:// URLs. If either value is ever used to
# build browser-facing links, that would explain the reported mixed-content
# block behind an https:// front end — confirm against the OpenPanel docs
# whether they should be the public https:// URLs instead.
DASHBOARD_URL="http://op-dashboard:3000"
API_URL="http://op-api:3000"

COOKIE_SECRET="superSecert"
EMAIL_SENDER="no-reply@example.com"
RESEND_API_KEY="re_APIKey"
# Quoted for consistency — every other value in this file is double-quoted.
CORS_ORIGIN="https://example.com"
The content of my docker-compose.yml
# Docker Compose stack for self-hosted OpenPanel:
# Caddy reverse proxy in front of dashboard/api/worker, backed by
# Postgres (op-db), Redis (op-kv), and ClickHouse (op-ch).
services:
  op-proxy:
    image: caddy:2-alpine
    restart: always
    ports:
      # NOTE(review): only port 80 is published, but the Caddyfile also binds
      # :443 — confirm the external TLS endpoint forwards to port 80 only.
      - "80:80"
    volumes:
      - ./caddy/Caddyfile:/etc/caddy/Caddyfile
    depends_on:
      op-dashboard:
        condition: service_healthy
      op-api:
        condition: service_healthy
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  op-db:
    image: postgres:14-alpine
    restart: always
    volumes:
      - op-db-data:/var/lib/postgresql/data
    healthcheck:
      test:
        - CMD-SHELL
        - pg_isready -U postgres
      interval: 10s
      timeout: 5s
      retries: 5
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  op-kv:
    image: redis:7.2.5-alpine
    restart: always
    volumes:
      - op-kv-data:/data
    command:
      - redis-server
      - "--maxmemory-policy"
      - noeviction
    healthcheck:
      test:
        - CMD-SHELL
        - redis-cli ping
      interval: 10s
      timeout: 5s
      retries: 5
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  op-ch:
    image: clickhouse/clickhouse-server:25.10.2.65
    restart: always
    volumes:
      - op-ch-data:/var/lib/clickhouse
      - op-ch-logs:/var/log/clickhouse-server
      - ./clickhouse/clickhouse-config.xml:/etc/clickhouse-server/config.d/op-config.xml:ro
      - ./clickhouse/clickhouse-user-config.xml:/etc/clickhouse-server/users.d/op-user-config.xml:ro
      - ./clickhouse/init-db.sh:/docker-entrypoint-initdb.d/init-db.sh:ro
    environment:
      - CLICKHOUSE_SKIP_USER_SETUP=1
    healthcheck:
      test:
        - CMD-SHELL
        - clickhouse-client --query "SELECT 1"
      interval: 10s
      timeout: 5s
      retries: 5
    ulimits:
      # High open-file limit for ClickHouse.
      nofile:
        soft: 262144
        hard: 262144
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  op-api:
    image: lindesvard/openpanel-api:2
    restart: always
    # Run database migrations before starting the API.
    command: |
      sh -c "
      echo 'Running migrations...'
      CI=true pnpm -r run migrate:deploy
      pnpm start
      "
    healthcheck:
      test:
        - CMD-SHELL
        - curl -f http://localhost:3000/healthcheck || exit 1
      interval: 10s
      timeout: 5s
      retries: 5
    depends_on:
      op-db:
        condition: service_healthy
      op-ch:
        condition: service_healthy
      op-kv:
        condition: service_healthy
    env_file:
      - .env
    logging:
      driver: json-file
      options:
        max-size: "50m"
        max-file: "3"

  op-dashboard:
    image: lindesvard/openpanel-dashboard:2
    restart: always
    depends_on:
      op-api:
        condition: service_healthy
    env_file:
      - .env
    healthcheck:
      test:
        - CMD-SHELL
        - curl -f http://localhost:3000/api/healthcheck || exit 1
      interval: 10s
      timeout: 5s
      retries: 5
    logging:
      driver: json-file
      options:
        max-size: "20m"
        max-file: "3"

  op-worker:
    image: lindesvard/openpanel-worker:2
    restart: always
    depends_on:
      op-api:
        condition: service_healthy
    env_file:
      - .env
    healthcheck:
      test:
        - CMD-SHELL
        - curl -f http://localhost:3000/healthcheck || exit 1
      interval: 10s
      timeout: 5s
      retries: 5
    deploy:
      mode: replicated
      replicas: 2
    logging:
      driver: json-file
      options:
        max-size: "30m"
        max-file: "3"

volumes:
  op-db-data:
    driver: local
  op-kv-data:
    driver: local
  op-ch-data:
    driver: local
  op-ch-logs:
    driver: local
The content of the Caddyfile
# Caddy runs plain HTTP behind an external TLS endpoint, so automatic HTTPS
# is disabled and the upstream proxy ranges are trusted for forwarded headers.
# NOTE(review): for the forwarded scheme to be honored, the external TLS
# endpoint must send X-Forwarded-Proto: https — if it does not, generated
# absolute URLs will be http:// and trigger the mixed-content block. Confirm.
{
	auto_https off
	admin off
	servers {
		trusted_proxies static private_ranges
	}
}

:80, :443 {
	# Dashboard + API on the main host; /api/* is stripped and proxied to the API.
	@quality host quality.example.com
	handle @quality {
		encode gzip
		handle_path /api/* {
			reverse_proxy op-api:3000
		}
		reverse_proxy op-dashboard:3000
	}

	# Worker UI behind basic auth on a separate subdomain.
	@worker host worker.quality.example.com
	handle @worker {
		encode gzip
		basic_auth {
			# NOTE(review): Caddy 2's basic_auth expects a bcrypt HASH generated
			# with `caddy hash-password`, not a plaintext password — as written,
			# this line will fail to validate. Confirm against the Caddy docs.
			admin SuperSecret
		}
		reverse_proxy op-worker:3000
	}
}
My running docker containers:

Reactions are currently unavailable
Metadata
Metadata
Assignees
Labels
No labels