Skip to content

Commit 8a7f6d9

Browse files
authored
chore(process_updates): move away from docker check to celery (#195)
Just because using docker healthchecks for this bothers me. Plus, now it's easy to disable process_updates for testing and we have some form of record of the responses.
1 parent 0884f16 commit 8a7f6d9

25 files changed

Lines changed: 1330 additions & 22 deletions

.envs/.local/.celery

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
CELERY_BROKER_URL=redis://scram-redis-1:6379/2
2+
CELERY_RESULT_BACKEND=redis://scram-redis-1:6379/2
3+
SCRAM_API_URL=http://django:8000/

.envs/.local/.celery-secondary

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
CELERY_BROKER_URL=redis://scram-redis-2:6379/2
2+
CELERY_RESULT_BACKEND=redis://scram-redis-2:6379/2
3+
SCRAM_API_URL=http://django-secondary:8000/

.github/workflows/behave.yml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,9 @@ jobs:
7070
- name: Upload Coverage to Coveralls
7171
if: matrix.python-version == '3.12'
7272
uses: coverallsapp/github-action@v2
73+
with:
74+
parallel: true
75+
flag-name: behave
7376

7477
- name: Upload Coverage to GitHub
7578
if: matrix.python-version == '3.12'
@@ -83,6 +86,8 @@ jobs:
8386
uses: 5monkeys/cobertura-action@v14
8487
with:
8588
minimum_coverage: "50"
89+
report_name: "Django Pytest/Behave Coverage"
90+
8691

8792
- name: Check Docker state (post-test)
8893
if: always()
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
---
2+
name: Finish Coverage
3+
4+
on:
5+
workflow_run:
6+
workflows: ["Run pytest", "Run behave"]
7+
types:
8+
- completed
9+
10+
jobs:
11+
finish:
12+
runs-on: ubuntu-latest
13+
permissions:
14+
contents: read
15+
if: ${{ github.event.workflow_run.conclusion == 'success' }}
16+
steps:
17+
- name: Finish Coveralls
18+
uses: coverallsapp/github-action@v2
19+
with:
20+
parallel-finished: true

.github/workflows/pytest.yml

Lines changed: 27 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -60,10 +60,8 @@ jobs:
6060
- name: Install dependencies
6161
run: |
6262
python -m pip install --upgrade pip
63-
python -m pip install uv
63+
python -m pip install uv pytest-github-actions-annotate-failures
6464
uv pip install --system -r requirements/local.txt --prerelease=allow
65-
# https://github.com/pytest-dev/pytest-github-actions-annotate-failures/pull/68 isn't yet in a release
66-
uv pip install --system git+https://github.com/pytest-dev/pytest-github-actions-annotate-failures.git@6e66cd895fe05cd09be8bad58f5d79110a20385f
6765
6866
- name: Apply migrations
6967
env:
@@ -82,3 +80,29 @@ jobs:
8280
DATABASE_URL: "postgres://scram:@localhost:5432/test_scram_${{ matrix.python-version }}"
8381
REDIS_HOST: "localhost"
8482
run: pytest
83+
84+
- name: Install Scheduler Dependencies
85+
run: |
86+
cd scheduler
87+
uv sync
88+
89+
- name: Run Scheduler Tests
90+
run: |
91+
cd scheduler
92+
uv run pytest
93+
94+
- name: Upload Coverage to Coveralls
95+
if: matrix.python-version == '3.12'
96+
uses: coverallsapp/github-action@v2
97+
with:
98+
file: ./scheduler/coverage.xml
99+
parallel: true
100+
flag-name: scheduler
101+
102+
- name: Display Scheduler Coverage Metrics for scheduler tests
103+
if: matrix.python-version == '3.12'
104+
uses: 5monkeys/cobertura-action@v14
105+
with:
106+
report_name: "Pytest Celery Scheduler Coverage"
107+
path: ./scheduler/coverage.xml
108+
minimum_coverage: "80"

Makefile

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,11 @@ pass-reset: compose.override.yml
129129
pytest: compose.override.yml
130130
@docker compose run --rm django coverage run -m pytest
131131

132+
## pytest-scheduler: runs scheduler package tests with coverage
133+
.PHONY: pytest-scheduler
134+
pytest-scheduler:
135+
@cd scheduler && uv run pytest
136+
132137
## run: brings up the containers as described in compose.override.yml
133138
.PHONY: run
134139
run: compose.override.yml

compose.override.local.yml

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -130,6 +130,57 @@ services:
130130
deploy:
131131
replicas: 1
132132

133+
celery-worker:
134+
volumes:
135+
- ./scheduler/src/scheduler/:/app/scheduler:ro
136+
env_file:
137+
- ./.envs/.local/.celery
138+
139+
celery-worker-secondary:
140+
volumes:
141+
- ./scheduler/src/scheduler/:/app/scheduler:ro
142+
env_file:
143+
- ./.envs/.local/.celery-secondary
144+
deploy:
145+
replicas: 1
146+
147+
celery-beat:
148+
command:
149+
["celery", "-A", "scheduler.app:scram_api_scheduler", "beat", "--loglevel=info"]
150+
env_file:
151+
- ./.envs/.local/.celery
152+
environment:
153+
- DISABLE_PROCESS_UPDATES=False
154+
155+
celery-beat-secondary:
156+
command:
157+
["celery", "-A", "scheduler.app:scram_api_scheduler", "beat", "--loglevel=info"]
158+
env_file:
159+
- ./.envs/.local/.celery-secondary
160+
environment:
161+
- DISABLE_PROCESS_UPDATES=False
162+
deploy:
163+
replicas: 1
164+
165+
flower:
166+
env_file:
167+
- ./.envs/.local/.celery
168+
command:
169+
["celery", "-A", "scheduler.app:scram_api_scheduler", "flower", "--port=5555"]
170+
ports:
171+
- "5555:5555"
172+
173+
flower-secondary:
174+
env_file:
175+
- ./.envs/.local/.celery-secondary
176+
command:
177+
["celery", "-A", "scheduler.app:scram_api_scheduler", "flower", "--port=5555"]
178+
ports:
179+
- "5556:5555"
180+
deploy:
181+
replicas: 1
182+
183+
133184
networks:
134185
default:
135186
ipam:

compose.override.production.yml

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,29 @@ services:
9595
env_file:
9696
- ./.envs/.production/.translator
9797

98+
99+
celery-worker:
100+
volumes:
101+
- ./scheduler/src/scheduler/:/app/scheduler:ro
102+
env_file:
103+
- ./.envs/.production/.celery
104+
105+
celery-beat:
106+
command:
107+
["celery", "-A", "scheduler.app:scram_api_scheduler", "beat", "--loglevel=info"]
108+
env_file:
109+
- ./.envs/.production/.celery
110+
111+
112+
flower:
113+
env_file:
114+
- ./.envs/.production/.celery
115+
command:
116+
["celery", "-A", "scheduler.app:scram_api_scheduler", "flower", "--port=5555"]
117+
ports:
118+
- "5555:5555"
119+
120+
98121
networks:
99122
default:
100123
enable_ipv6: true

compose.yml

Lines changed: 98 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,17 @@ services:
1010
condition: service_healthy
1111
redis:
1212
condition: service_healthy
13+
celery-worker:
14+
condition: service_healthy
1315
networks:
1416
default: {}
1517
sysctls:
1618
- net.ipv6.conf.all.disable_ipv6=0
1719
command: /start
1820
healthcheck:
19-
test: ["CMD", "curl", "-f", "http://localhost:8000/process_updates/"]
21+
test: ["CMD", "curl", "-f", "http://localhost:8000/health/"]
2022
interval: 30s
21-
timeout: 30s
23+
timeout: 2s
2224
start_period: 30s
2325
retries: 5
2426
deploy:
@@ -35,15 +37,17 @@ services:
3537
condition: service_healthy
3638
django:
3739
condition: service_healthy
40+
celery-worker-secondary:
41+
condition: service_healthy
3842
networks:
3943
default: {}
4044
sysctls:
4145
- net.ipv6.conf.all.disable_ipv6=0
4246
command: /start
4347
healthcheck:
44-
test: ["CMD", "curl", "-f", "http://localhost:8000/process_updates/"]
48+
test: ["CMD", "curl", "-f", "http://localhost:8000/health/"]
4549
interval: 30s
46-
timeout: 30s
50+
timeout: 2s
4751
start_period: 30s
4852
retries: 5
4953
deploy:
@@ -129,3 +133,93 @@ services:
129133
- net.ipv6.conf.all.disable_ipv6=0
130134
deploy:
131135
replicas: ${TRANSLATOR_REPLICAS:-0}
136+
137+
celery-worker:
138+
build:
139+
context: .
140+
dockerfile: ./compose/production/celery/Dockerfile
141+
depends_on:
142+
redis:
143+
condition: service_healthy
144+
healthcheck:
145+
test: ["CMD", "celery", "-A", "scheduler.app:scram_api_scheduler", "inspect", "ping"]
146+
interval: 30s
147+
timeout: 10s
148+
start_period: 30s
149+
retries: 3
150+
restart: unless-stopped
151+
152+
celery-worker-secondary:
153+
build:
154+
context: .
155+
dockerfile: ./compose/production/celery/Dockerfile
156+
depends_on:
157+
redis:
158+
condition: service_healthy
159+
healthcheck:
160+
test: ["CMD", "celery", "-A", "scheduler.app:scram_api_scheduler", "inspect", "ping"]
161+
interval: 30s
162+
timeout: 10s
163+
start_period: 30s
164+
retries: 3
165+
restart: unless-stopped
166+
deploy:
167+
replicas: ${CELERY_WORKER_REPLICAS:-0}
168+
169+
celery-beat:
170+
build:
171+
context: .
172+
dockerfile: ./compose/production/celery/Dockerfile
173+
depends_on:
174+
redis:
175+
condition: service_healthy
176+
celery-worker:
177+
condition: service_healthy
178+
restart: unless-stopped
179+
180+
celery-beat-secondary:
181+
build:
182+
context: .
183+
dockerfile: ./compose/production/celery/Dockerfile
184+
depends_on:
185+
redis:
186+
condition: service_healthy
187+
celery-worker-secondary:
188+
condition: service_healthy
189+
restart: unless-stopped
190+
deploy:
191+
replicas: ${CELERY_BEAT_REPLICAS:-0}
192+
193+
flower:
194+
build:
195+
context: .
196+
dockerfile: ./compose/production/celery/Dockerfile
197+
depends_on:
198+
redis:
199+
condition: service_healthy
200+
celery-worker:
201+
condition: service_healthy
202+
healthcheck:
203+
test: ["CMD", "curl", "-f", "http://localhost:5555/healthcheck"]
204+
interval: 30s
205+
timeout: 5s
206+
retries: 3
207+
restart: unless-stopped
208+
209+
flower-secondary:
210+
build:
211+
context: .
212+
dockerfile: ./compose/production/celery/Dockerfile
213+
depends_on:
214+
redis:
215+
condition: service_healthy
216+
celery-worker-secondary:
217+
condition: service_healthy
218+
healthcheck:
219+
test: ["CMD", "curl", "-f", "http://localhost:5555/healthcheck"]
220+
interval: 30s
221+
timeout: 5s
222+
retries: 3
223+
restart: unless-stopped
224+
deploy:
225+
replicas: ${FLOWER_REPLICAS:-0}
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
# Use a Python image with uv pre-installed
2+
FROM ghcr.io/astral-sh/uv:python3.14-bookworm-slim
3+
4+
# Setup a non-root user
5+
RUN groupadd --system --gid 999 nonroot \
6+
&& useradd --system --gid 999 --uid 999 --create-home nonroot
7+
8+
# Install the project into `/app`
9+
WORKDIR /app
10+
11+
# Enable bytecode compilation
12+
ENV UV_COMPILE_BYTECODE=1
13+
14+
# Copy from the cache instead of linking since it's a mounted volume
15+
ENV UV_LINK_MODE=copy
16+
17+
# Omit development dependencies
18+
ENV UV_NO_DEV=1
19+
20+
# Ensure installed tools can be executed out of the box
21+
ENV UV_TOOL_BIN_DIR=/usr/local/bin
22+
23+
COPY ./scheduler/pyproject.toml ./scheduler/uv.lock* ./
24+
25+
# Install the project's dependencies using the lockfile and settings
26+
RUN --mount=type=cache,target=/root/.cache/uv \
27+
--mount=type=bind,source=scheduler/uv.lock,target=uv.lock \
28+
--mount=type=bind,source=scheduler/pyproject.toml,target=pyproject.toml \
29+
uv sync --locked --no-install-project
30+
31+
# Then, add the rest of the project source code and install it
32+
# Installing separately from its dependencies allows optimal layer caching
33+
COPY ./scheduler/src /app/
34+
35+
RUN --mount=type=cache,target=/root/.cache/uv \
36+
uv sync --locked
37+
38+
# Place executables in the environment at the front of the path
39+
ENV PATH="/app/.venv/bin:$PATH"
40+
41+
# Reset the entrypoint, don't invoke `uv`
42+
ENTRYPOINT []
43+
44+
RUN chown -R nonroot:nonroot /app
45+
USER nonroot
46+
47+
CMD ["celery", "-A", "scheduler.app:scram_api_scheduler", "worker", "--loglevel=info", "-E"]

0 commit comments

Comments
 (0)