From fe1c1cb0bc8bef90c63f69c90acd887277d320b0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9?=
Date: Tue, 20 Jan 2026 23:40:38 +0100
Subject: [PATCH 1/2] Fix #1686: Use dynamic version from crawl4ai package in
 health endpoint

Replace hardcoded version string with import from crawl4ai.__version__
to ensure health endpoint reports correct version.

Fixes #1686

Reviewers: @chansearrington
---
 deploy/docker/server.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/deploy/docker/server.py b/deploy/docker/server.py
index 7ae1adb8..f958bb4b 100644
--- a/deploy/docker/server.py
+++ b/deploy/docker/server.py
@@ -9,6 +9,7 @@
 # ── stdlib & 3rd‑party imports ───────────────────────────────
 from crawler_pool import get_crawler, close_all, janitor
 from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig
+from crawl4ai import __version__
 from auth import create_access_token, get_token_dependency, TokenRequest
 from pydantic import BaseModel
 from typing import Optional, List, Dict
@@ -73,7 +74,7 @@
 config = load_config()
 setup_logging(config)
 
-__version__ = "0.5.1-d1"
+# Version is imported from crawl4ai package to ensure it stays in sync
 
 # ── global page semaphore (hard cap) ─────────────────────────
 MAX_PAGES = config["crawler"]["pool"].get("max_pages", 30)

From 9123f65140db6fe1b5233edd7b28b4fbf1bb1d70 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9?=
Date: Tue, 20 Jan 2026 23:40:38 +0100
Subject: [PATCH 2/2] Fix #1686: Use dynamic version from crawl4ai package in
 health endpoint

Replace hardcoded version string with import from crawl4ai.__version__
to ensure health endpoint reports correct version.

Fixes #1686

Reviewers: @chansearrington
---
 deploy/docker/server.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/deploy/docker/server.py b/deploy/docker/server.py
index 7ae1adb8..211acfa9 100644
--- a/deploy/docker/server.py
+++ b/deploy/docker/server.py
@@ -9,6 +9,7 @@
 # ── stdlib & 3rd‑party imports ───────────────────────────────
 from crawler_pool import get_crawler, close_all, janitor
 from crawl4ai import AsyncWebCrawler, BrowserConfig, CrawlerRunConfig
+from crawl4ai.__version__ import __version__
 from auth import create_access_token, get_token_dependency, TokenRequest
 from pydantic import BaseModel
 from typing import Optional, List, Dict
@@ -73,7 +74,7 @@
 config = load_config()
 setup_logging(config)
 
-__version__ = "0.5.1-d1"
+# Version is imported from crawl4ai package to ensure it stays in sync
 
 # ── global page semaphore (hard cap) ─────────────────────────
 MAX_PAGES = config["crawler"]["pool"].get("max_pages", 30)
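
For context, here is a minimal sketch (not part of either patch; it assumes deploy/docker/server.py is a FastAPI app and that a /health route exists) of how the imported __version__ would surface in the health endpoint:

    # Hedged sketch, assumptions noted: FastAPI app and /health route are
    # illustrative, not copied from server.py.
    from fastapi import FastAPI
    from crawl4ai import __version__  # dynamic version from the installed package

    app = FastAPI()

    @app.get("/health")
    async def health():
        # Reports the installed crawl4ai release instead of a hand-maintained
        # constant like "0.5.1-d1".
        return {"status": "ok", "version": __version__}

Both import forms used in the series resolve to the same string: patch 1 takes the name re-exported at the crawl4ai package level, while patch 2 reads it directly from the crawl4ai.__version__ module rather than relying on the package-level re-export.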