diff --git a/backend/src/baserow/contrib/automation/nodes/node_types.py b/backend/src/baserow/contrib/automation/nodes/node_types.py index 43612551c0..24fc99a4c6 100644 --- a/backend/src/baserow/contrib/automation/nodes/node_types.py +++ b/backend/src/baserow/contrib/automation/nodes/node_types.py @@ -1,8 +1,8 @@ -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Dict, Iterable, Optional from django.contrib.auth.models import AbstractUser from django.db import router -from django.db.models import Q, QuerySet +from django.db.models import Q from django.utils import timezone from django.utils.translation import gettext as _ @@ -340,8 +340,8 @@ def before_move( def on_event( self, - services: QuerySet[Service], - event_payload: List[Dict] | None | Callable = None, + services: Iterable[Service], + event_payload: Dict | None | Callable = None, user: Optional[AbstractUser] = None, ): from baserow.contrib.automation.workflows.handler import ( @@ -363,12 +363,16 @@ def on_event( .select_related("workflow__automation__workspace") ) - if triggers and callable(event_payload): - event_payload = event_payload() + # For perf reasons, store the trigger<->service relationship. + service_map = {service.id: service for service in services} for trigger in triggers: - workflow = trigger.workflow + # If we've received a callable payload, call it with the specific service, + # this can give us a payload that is specific to the trigger's service. 
+            # NOTE(review): bind the factory result to a *separate* local.
+            # Assigning back to ``event_payload`` clobbers the callable on the
+            # first iteration, so every subsequent trigger would silently reuse
+            # the first trigger's payload.
+            if callable(event_payload):
+                trigger_payload = event_payload(service_map[trigger.service_id])
+            else:
+                trigger_payload = event_payload
+            workflow = trigger.workflow
             AutomationWorkflowHandler().async_start_workflow(
                 workflow,
-                event_payload,
+                trigger_payload,
diff --git a/backend/src/baserow/contrib/integrations/core/models.py b/backend/src/baserow/contrib/integrations/core/models.py
index 7287b9e9d2..ff5568fab5 100644
--- a/backend/src/baserow/contrib/integrations/core/models.py
+++ b/backend/src/baserow/contrib/integrations/core/models.py
@@ -206,6 +206,13 @@ class CorePeriodicService(Service):
         help_text="Timestamp when the service was last executed periodically. This "
         "value is used to calculate when it should be run.",
     )
+    next_run_at = models.DateTimeField(
+        null=True,
+        db_index=True,
+        db_default=None,
+        help_text="The next scheduled time for this service to run. Automatically "
+        "calculated based on the interval and schedule fields.",
+    )
     interval = models.CharField(
         max_length=10,
         choices=PERIODIC_INTERVAL_CHOICES,
diff --git a/backend/src/baserow/contrib/integrations/core/service_types.py b/backend/src/baserow/contrib/integrations/core/service_types.py
index 3bb222ca85..1bc3ed452e 100644
--- a/backend/src/baserow/contrib/integrations/core/service_types.py
+++ b/backend/src/baserow/contrib/integrations/core/service_types.py
@@ -1,7 +1,7 @@
 import json
 import socket
 import uuid
-from datetime import datetime, timedelta
+from datetime import datetime
 from smtplib import SMTPAuthenticationError, SMTPConnectError, SMTPNotSupportedError
 from typing import Any, Callable, Dict, Generator, List, Optional, Tuple
@@ -9,13 +9,11 @@
 from django.contrib.auth.models import AbstractUser
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.db import router
-from django.db.models import DurationField, ExpressionWrapper, F, Q, Value
-from django.db.models.functions import Coalesce, NullIf
+from django.db.models import Q, QuerySet
 from django.urls import path
 from django.utils import timezone
 from 
django.utils.translation import gettext as _ -from dateutil.relativedelta import relativedelta from genson import SchemaBuilder from loguru import logger from requests import exceptions as request_exceptions @@ -31,11 +29,7 @@ BODY_TYPE, HTTP_METHOD, PERIODIC_INTERVAL_CHOICES, - PERIODIC_INTERVAL_DAY, - PERIODIC_INTERVAL_HOUR, PERIODIC_INTERVAL_MINUTE, - PERIODIC_INTERVAL_MONTH, - PERIODIC_INTERVAL_WEEK, ) from baserow.contrib.integrations.core.exceptions import ( CoreHTTPTriggerServiceDoesNotExist, @@ -54,6 +48,7 @@ HTTPHeader, HTTPQueryParam, ) +from baserow.contrib.integrations.core.utils import calculate_next_periodic_run from baserow.contrib.integrations.utils import get_http_request_function from baserow.core.formula.types import BaserowFormulaObject from baserow.core.formula.validator import ( @@ -1172,6 +1167,7 @@ class CorePeriodicServiceType(TriggerServiceTypeMixin, CoreServiceType): "hour", "day_of_week", "day_of_month", + "next_run_at", ] serializer_field_overrides = { @@ -1219,6 +1215,7 @@ class SerializedDict(ServiceDict): hour: int day_of_week: int day_of_month: int + next_run_at: datetime def prepare_values( self, @@ -1279,16 +1276,33 @@ def stop_listening(self): super().stop_listening() self._cancel_periodic_task() - def _get_payload(self, now=None): - now = now if now else timezone.now() - return {"triggered_at": now.isoformat()} + def _get_dispatch_payload(self, service: CorePeriodicService) -> Dict[str, str]: + return { + "triggered_at": service.last_periodic_run.isoformat(), + "next_run_at": service.next_run_at.isoformat(), + } + + def _get_simulation_payload(self, service: CorePeriodicService) -> Dict[str, str]: + now = timezone.now().replace(second=0, microsecond=0) + next_run = calculate_next_periodic_run( + interval=service.interval, + minute=service.minute, + hour=service.hour, + day_of_week=service.day_of_week, + day_of_month=service.day_of_month, + from_time=now, + ) + return { + "triggered_at": now.isoformat(), + "next_run_at": 
next_run.isoformat(), + } def dispatch_data( self, service: CorePeriodicService, resolved_values: Dict[str, Any], dispatch_context: DispatchContext, - ) -> None: + ) -> Dict[str, str]: """ Responsible for dispatching a single periodic service. In practice we dispatch all periodic services that are due in one go so this method just @@ -1302,90 +1316,29 @@ def dispatch_data( if dispatch_context.event_payload is not None: return dispatch_context.event_payload - return self._get_payload() + return self._get_simulation_payload(service) - def call_periodic_services_that_are_due(self, now: datetime): + def get_periodic_services_that_are_due( + self, current: datetime = None + ) -> QuerySet[CorePeriodicService]: """ - Responsible for finding all periodic services that are due to run and - calling the `on_event` callback with them. + Responsible for fetching all periodic services which are due to be run at the + given time. It also locks these services to prevent multiple workers from + dispatching the same service. - :param now: The current datetime. + :param current: The current time to compare the `next_run_at` field to. + If not provided, it will default to the current time. + :return: A queryset of `CorePeriodicService` instances which are due to be run. """ - # Truncate to minute precision for consistent interval calculations - # Note: we replace the seconds and microseconds due to jitter that - # can exist between when the Celery task is scheduled, and when the - # services were actually processed. For example: - # - # Assuming `interval=minute` and `minute=2` (i.e. run every two minutes): - # - Celery runs at 12:00:00.500 (half a second delay) - # - Service is triggered, last_periodic_run = 12:00:00.500 - # Next checks: - # - At 12:01:00.400: Is 12:00:00.500 <= 11:59:00.400? NO - # - At 12:02:00.300: Is 12:00:00.500 <= 12:00:00.300? NO (500ms > 300ms!) - # - At 12:03:00.200: Is 12:00:00.500 <= 12:01:00.200? 
YES - now = now.replace(second=0, microsecond=0) - - query_conditions = Q() - is_null = Q(last_periodic_run__isnull=True) - - # MINUTE - minute_condition = Q( - is_null - | Q( - last_periodic_run__lte=now - - ExpressionWrapper( - Coalesce(NullIf(F("minute"), 0), Value(1)) * timedelta(minutes=1), - output_field=DurationField(), - ) - ), - interval=PERIODIC_INTERVAL_MINUTE, - ) - query_conditions |= minute_condition - - # HOUR - hour_ago = now - timedelta(hours=1) - hour_condition = Q( - is_null | Q(last_periodic_run__lt=hour_ago), - interval=PERIODIC_INTERVAL_HOUR, - minute__lte=now.minute, - ) - query_conditions |= hour_condition - - # DAY - day_ago = now - timedelta(days=1) - day_condition = Q( - is_null | Q(last_periodic_run__lt=day_ago), - interval=PERIODIC_INTERVAL_DAY, - hour__lte=now.hour, - minute__lte=now.minute, - ) - query_conditions |= day_condition - - # WEEK - week_ago = now - timedelta(weeks=1) - week_condition = Q( - is_null | Q(last_periodic_run__lt=week_ago), - interval=PERIODIC_INTERVAL_WEEK, - day_of_week=now.weekday(), - hour__lte=now.hour, - minute__lte=now.minute, - ) - query_conditions |= week_condition - - # MONTH - month_ago = now - relativedelta(months=1) - month_condition = Q( - is_null | Q(last_periodic_run__lt=month_ago), - interval=PERIODIC_INTERVAL_MONTH, - day_of_month=now.day, - hour__lte=now.hour, - minute__lte=now.minute, - ) - query_conditions |= month_condition + # If we haven't been given the current time, get it. 
+ current = current or timezone.now() - periodic_services = ( - CorePeriodicService.objects.filter(query_conditions) + return ( + CorePeriodicService.objects.filter( + Q(next_run_at__lte=current.replace(second=0, microsecond=0)) + | Q(next_run_at__isnull=True) + ) .select_for_update( of=("self",), skip_locked=True, @@ -1394,12 +1347,79 @@ def call_periodic_services_that_are_due(self, now: datetime): .order_by("id") ) + def call_periodic_services_that_are_due(self): + """ + Responsible for finding all periodic services that are due. This will likely + result in services which are due, but not dispatchable, it is up to the parent + instance (e.g. automation trigger) to determine if the due service is *also* + dispatchable (e.g. a trigger wants to know if the workflow is published). + + Only services which were dispatched will be included in the bulk-update, where + the two date fields are refreshed for their next run. + """ + + # Truncate to minute precision for consistent comparisons + now = timezone.now().replace(second=0, microsecond=0) + + # Determine which services are due to be run. This is not an exhaustive list, + # it's possible only a subset of these will actually be dispatched. + periodic_services_due = self.get_periodic_services_that_are_due(now) + + # This list will contain the definitive list of services that were marked + # for dispatching by the parent, and which we will update the `next_run_at` and + # `last_periodic_run` fields for. + periodic_services_dispatched = [] + + def _get_service_payload( + dispatched_service: CorePeriodicService, + ) -> Dict[str, str]: + """ + Responsible for returning this service's specific payload, + and also creating a list of our due services which were *also* + deemed to be dispatchable by the parent. + + :param dispatched_service: The service which will be dispatched. + :return: The payload to dispatch for this service. + """ + + # Calculate next run from the current `next_run_at` (not from 'now'). 
+ # This prevents drift even if the service runs late. + # If the service ran *very* late (e.g. it was scheduled, but the server + # was down, and we didn't run any services), keep advancing until we get + # a future time. For example: + # - A service is scheduled to run every minute. + # The next run is scheduled for 10:00:00. + # - The server is down, and we only restart it at 10:05:00. + # - When we restart, we run the service (as 10:00:00 <= 10:05:00). + # - The next run would technically be scheduled for 10:01:00, but that time + # is also in the past. So we keep advancing until we find a future time + # (10:06:00 in this case). + next_run = dispatched_service.next_run_at or now + while next_run <= now: + next_run = calculate_next_periodic_run( + interval=dispatched_service.interval, + minute=dispatched_service.minute, + hour=dispatched_service.hour, + day_of_week=dispatched_service.day_of_week, + day_of_month=dispatched_service.day_of_month, + from_time=next_run, + ) + + dispatched_service.next_run_at = next_run + dispatched_service.last_periodic_run = now + + periodic_services_dispatched.append(dispatched_service) + return self._get_dispatch_payload(dispatched_service) + self.on_event( - periodic_services, - self._get_payload(now), + periodic_services_due, + _get_service_payload, ) - periodic_services.update(last_periodic_run=now) + if periodic_services_dispatched: + CorePeriodicService.objects.bulk_update( + periodic_services_dispatched, ["next_run_at", "last_periodic_run"] + ) def get_schema_name(self, service: CorePeriodicService) -> str: return f"Periodic{service.id}Schema" @@ -1415,7 +1435,11 @@ def generate_schema( "properties": { "triggered_at": { "type": "string", - "title": _("Triggered at"), + "title": _("Previous scheduled run"), + }, + "next_run_at": { + "type": "string", + "title": _("Next scheduled run"), }, }, } diff --git a/backend/src/baserow/contrib/integrations/core/utils.py b/backend/src/baserow/contrib/integrations/core/utils.py new file 
mode 100644 index 0000000000..b821f2b9df --- /dev/null +++ b/backend/src/baserow/contrib/integrations/core/utils.py @@ -0,0 +1,100 @@ +from datetime import datetime, timedelta +from typing import Optional + +from django.utils import timezone + +from dateutil.relativedelta import relativedelta + +from baserow.contrib.integrations.core.constants import ( + PERIODIC_INTERVAL_DAY, + PERIODIC_INTERVAL_HOUR, + PERIODIC_INTERVAL_MINUTE, + PERIODIC_INTERVAL_MONTH, + PERIODIC_INTERVAL_WEEK, +) + + +def calculate_next_periodic_run( + interval: str, + minute: int, + hour: int, + day_of_week: int, + day_of_month: int, + from_time: Optional[datetime] = None, +) -> datetime: + """ + Calculate the next scheduled run time based on the service's schedule configuration. + + :param interval: The interval type (MINUTE, HOUR, DAY, WEEK, MONTH) + :param minute: The minute value (0-59) + :param hour: The hour value (0-23) + :param day_of_week: The day of week (0=Monday, 6=Sunday) + :param day_of_month: The day of month (1-31) + :param from_time: Calculate next run from this time (defaults to now) + :return: The next scheduled run time + """ + + if from_time is None: + from_time = timezone.now() + + # Truncate to minute precision + from_time = from_time.replace(second=0, microsecond=0) + + if interval == PERIODIC_INTERVAL_MINUTE: + # For minute intervals, add the interval to the from_time + interval_minutes = minute if minute > 0 else 1 + next_run = from_time + timedelta(minutes=interval_minutes) + + elif interval == PERIODIC_INTERVAL_HOUR: + # Run at the specified minute of each hour + next_run = from_time.replace(minute=minute) + # If we've already passed this minute in the current hour, move to next hour + if next_run <= from_time: + next_run += timedelta(hours=1) + + elif interval == PERIODIC_INTERVAL_DAY: + # Run at the specified hour:minute each day + next_run = from_time.replace(hour=hour, minute=minute) + # If we've already passed this time today, move to tomorrow + if next_run <= 
from_time:
+            next_run += timedelta(days=1)
+
+    elif interval == PERIODIC_INTERVAL_WEEK:
+        # Run at the specified day_of_week at hour:minute each week
+        current_weekday = from_time.weekday()
+        days_ahead = day_of_week - current_weekday
+
+        if days_ahead < 0:  # Target day already happened this week
+            days_ahead += 7
+        elif days_ahead == 0:  # Target day is today
+            # Check if we've already passed the scheduled time
+            scheduled_time_today = from_time.replace(hour=hour, minute=minute)
+            if scheduled_time_today <= from_time:
+                days_ahead = 7  # Move to next week
+
+        next_run = from_time + timedelta(days=days_ahead)
+        next_run = next_run.replace(hour=hour, minute=minute)
+
+    elif interval == PERIODIC_INTERVAL_MONTH:
+        # Run at the specified day_of_month at hour:minute each month.
+        # NOTE(review): guard the initial replace() too — day_of_month may not
+        # exist in from_time's month (e.g. day 31 in February), in which case
+        # datetime.replace raises ValueError. Clamp to the last day of the
+        # current month instead of crashing.
+        try:
+            next_run = from_time.replace(day=day_of_month, hour=hour, minute=minute)
+        except ValueError:
+            next_run = from_time.replace(
+                day=1, hour=hour, minute=minute
+            ) + relativedelta(months=1, days=-1)
+
+        # If we've already passed this time this month, move to next month
+        if next_run <= from_time:
+            # Move to next month
+            next_run += relativedelta(months=1)
+
+            # Handle case where day_of_month doesn't exist in the target month
+            # (e.g., day 31 in February)
+            try:
+                next_run = next_run.replace(day=day_of_month)
+            except ValueError:
+                # If the day doesn't exist, use the last day of the month
+                next_run = next_run.replace(day=1) + relativedelta(months=1, days=-1)
+            next_run = next_run.replace(hour=hour, minute=minute)
+
+    else:
+        # Unknown interval type, default to 1 hour from now
+        next_run = from_time + timedelta(hours=1)
+
+    return next_run
diff --git a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
index 464e711992..fa149c036d 100644
--- a/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
+++ b/backend/src/baserow/contrib/integrations/local_baserow/service_types.py
@@ -2335,7 +2335,7 @@ def _handle_signal(
         model: "GeneratedTableModel",
         **kwargs,
     ):
-        def get_data():
+        def get_data(service: Service):
            # 
Make sure we have an up to date model local_model = model.baserow_table.get_model() diff --git a/backend/src/baserow/contrib/integrations/migrations/0025_coreperiodicservice_next_run_at.py b/backend/src/baserow/contrib/integrations/migrations/0025_coreperiodicservice_next_run_at.py new file mode 100644 index 0000000000..d792f20f4d --- /dev/null +++ b/backend/src/baserow/contrib/integrations/migrations/0025_coreperiodicservice_next_run_at.py @@ -0,0 +1,23 @@ +# Generated by Django 5.0.14 on 2026-01-19 00:00 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("integrations", "0024_slack_integration_write_message_service"), + ] + + operations = [ + migrations.AddField( + model_name="coreperiodicservice", + name="next_run_at", + field=models.DateTimeField( + db_index=True, + help_text="The next scheduled time for this service to run. " + "Automatically calculated based on the interval and schedule fields.", + null=True, + db_default=None, + ), + ), + ] diff --git a/backend/src/baserow/contrib/integrations/migrations/0026_backfill_coreperiodicservice_next_run_at.py b/backend/src/baserow/contrib/integrations/migrations/0026_backfill_coreperiodicservice_next_run_at.py new file mode 100644 index 0000000000..3ffaef630d --- /dev/null +++ b/backend/src/baserow/contrib/integrations/migrations/0026_backfill_coreperiodicservice_next_run_at.py @@ -0,0 +1,135 @@ +from datetime import timedelta + +from dateutil.relativedelta import relativedelta +from django.db import migrations +from django.utils import timezone + + +def _calculate_next_run(interval, minute, hour, day_of_week, day_of_month, from_time): + """ + Calculate the next scheduled run time based on the service's schedule configuration. + This is a copy of CorePeriodicServiceType.calculate_next_run() for use in the migration. 
+ """ + + # Truncate to minute precision + from_time = from_time.replace(second=0, microsecond=0) + + if interval == "MINUTE": + # For minute intervals, add the interval to the from_time + interval_minutes = minute if minute > 0 else 1 + next_run = from_time + timedelta(minutes=interval_minutes) + + elif interval == "HOUR": + # Run at the specified minute of each hour + next_run = from_time.replace(minute=minute) + # If we've already passed this minute in the current hour, move to next hour + if next_run <= from_time: + next_run += timedelta(hours=1) + + elif interval == "DAY": + # Run at the specified hour:minute each day + next_run = from_time.replace(hour=hour, minute=minute) + # If we've already passed this time today, move to tomorrow + if next_run <= from_time: + next_run += timedelta(days=1) + + elif interval == "WEEK": + # Run at the specified day_of_week at hour:minute each week + current_weekday = from_time.weekday() + days_ahead = day_of_week - current_weekday + + if days_ahead < 0: # Target day already happened this week + days_ahead += 7 + elif days_ahead == 0: # Target day is today + # Check if we've already passed the scheduled time + scheduled_time_today = from_time.replace(hour=hour, minute=minute) + if scheduled_time_today <= from_time: + days_ahead = 7 # Move to next week + + next_run = from_time + timedelta(days=days_ahead) + next_run = next_run.replace(hour=hour, minute=minute) + + elif interval == "MONTH": + # Run at the specified day_of_month at hour:minute each month + next_run = from_time.replace(day=day_of_month, hour=hour, minute=minute) + + # If we've already passed this time this month, move to next month + if next_run <= from_time: + # Move to next month + next_run += relativedelta(months=1) + + # Handle case where day_of_month doesn't exist in the target month + # (e.g., day 31 in February) + try: + next_run = next_run.replace(day=day_of_month) + except ValueError: + # If the day doesn't exist, use the last day of the month + next_run 
= next_run.replace(day=1) + relativedelta(months=1, days=-1) + next_run = next_run.replace(hour=hour, minute=minute) + + else: + # Unknown interval type, default to 1 hour from now + next_run = from_time + timedelta(hours=1) + + return next_run + + +def forward(apps, schema_editor): + """ + Backfill next_run_at for all existing CorePeriodicService records. + """ + CorePeriodicService = apps.get_model("integrations", "CorePeriodicService") + now = timezone.now().replace(second=0, microsecond=0) + + services_to_update = [] + + # Only migrate services which have run, this will help reduce the + # size of the queryset by excluding 'draft' workflows. + services_which_have_run = CorePeriodicService.objects.exclude( + last_periodic_run__isnull=True + ) + for service in services_which_have_run: + # Calculate next_run_at based on the service's schedule + # Use last_periodic_run as the base if available, otherwise use now + from_time = service.last_periodic_run if service.last_periodic_run else now + + next_run = _calculate_next_run( + interval=service.interval, + minute=service.minute, + hour=service.hour, + day_of_week=service.day_of_week, + day_of_month=service.day_of_month, + from_time=from_time, + ) + + # If the service has never run (no last_periodic_run), and the calculated + # next_run is in the past, keep advancing until we get a future time. + # For services that have run before, keep the calculated next_run even if + # it's in the past, as they may be overdue and should run ASAP. 
+ if not service.last_periodic_run: + while next_run < now: + next_run = _calculate_next_run( + interval=service.interval, + minute=service.minute, + hour=service.hour, + day_of_week=service.day_of_week, + day_of_month=service.day_of_month, + from_time=next_run, + ) + + service.next_run_at = next_run + services_to_update.append(service) + + # Bulk update all services + if services_to_update: + CorePeriodicService.objects.bulk_update(services_to_update, ["next_run_at"]) + + +class Migration(migrations.Migration): + dependencies = [ + ("integrations", "0025_coreperiodicservice_next_run_at"), + ] + + operations = [ + migrations.RunPython(forward, migrations.RunPython.noop), + ] diff --git a/backend/src/baserow/contrib/integrations/tasks.py b/backend/src/baserow/contrib/integrations/tasks.py index 804e628a72..439ca74418 100644 --- a/backend/src/baserow/contrib/integrations/tasks.py +++ b/backend/src/baserow/contrib/integrations/tasks.py @@ -1,20 +1,14 @@ from django.db import transaction -from django.utils import timezone from baserow.config.celery import app from baserow.core.services.registries import service_type_registry -@app.task( - bind=True, - queue="export", -) +@app.task(bind=True, queue="export") def call_periodic_services_that_are_due(self): - """ """ - from baserow.contrib.integrations.core.service_types import CorePeriodicServiceType with transaction.atomic(): service_type_registry.get( CorePeriodicServiceType.type - ).call_periodic_services_that_are_due(timezone.now()) + ).call_periodic_services_that_are_due() diff --git a/backend/src/baserow/test_utils/fixtures/service.py b/backend/src/baserow/test_utils/fixtures/service.py index d9e936056a..79361b5c33 100644 --- a/backend/src/baserow/test_utils/fixtures/service.py +++ b/backend/src/baserow/test_utils/fixtures/service.py @@ -5,6 +5,7 @@ CoreHTTPRequestService, CoreHTTPTriggerService, CoreIteratorService, + CorePeriodicService, CoreRouterService, CoreSMTPEmailService, ) @@ -140,6 +141,9 @@ def 
create_core_http_trigger_service(self, **kwargs) -> CoreSMTPEmailService: return self.create_service(CoreHTTPTriggerService, **kwargs) + def create_core_periodic_service(self, **kwargs) -> CorePeriodicService: + return self.create_service(CorePeriodicService, **kwargs) + def create_service(self, model_class, **kwargs): if "integration" not in kwargs: integration = None diff --git a/backend/tests/baserow/contrib/__init__.py b/backend/tests/baserow/contrib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/baserow/contrib/automation/nodes/test_node_types.py b/backend/tests/baserow/contrib/automation/nodes/test_node_types.py index 63cd759d1c..8fedb27598 100644 --- a/backend/tests/baserow/contrib/automation/nodes/test_node_types.py +++ b/backend/tests/baserow/contrib/automation/nodes/test_node_types.py @@ -1,9 +1,12 @@ import uuid +from datetime import datetime from unittest.mock import MagicMock, patch from django.urls import reverse +from django.utils.timezone import now import pytest +from freezegun import freeze_time from rest_framework.status import ( HTTP_202_ACCEPTED, HTTP_204_NO_CONTENT, @@ -17,7 +20,13 @@ from baserow.contrib.automation.nodes.service import AutomationNodeService from baserow.contrib.automation.workflows.constants import WorkflowState from baserow.contrib.automation.workflows.service import AutomationWorkflowService +from baserow.contrib.integrations.core.constants import ( + PERIODIC_INTERVAL_DAY, +) +from baserow.contrib.integrations.core.models import CorePeriodicService +from baserow.contrib.integrations.core.service_types import CorePeriodicServiceType from baserow.core.handler import CoreHandler +from baserow.core.services.registries import service_type_registry from baserow.core.services.types import DispatchResult from tests.baserow.contrib.automation.api.utils import get_api_kwargs @@ -567,3 +576,125 @@ def test_core_http_trigger_node_duplicating_workflow_sets_unique_uid(data_fixtur assert 
isinstance(duplicated_service.uid, uuid.UUID) assert str(duplicated_service.uid) != str(trigger_node.service.uid) + + +@pytest.mark.django_db +def test_periodic_trigger_node_on_event_only_updates_dispatched_services(data_fixture): + """ + Given a workspace with: + - Automation + - Workflow A (draft): scheduled for today at noon + - Workflow B (live): scheduled for today at noon + - Workflow C: (live): scheduled for today at 9am. + + Technically, services A, B1 (draft) and B2 (live) and C1 (draft) and C2 (live) are + all "due". This test is to ensure that when `CorePeriodicTriggerNodeType` calls + `on_event`, only the services B2+C2 have their two dates updated. + """ + + user = data_fixture.create_user() + automation = data_fixture.create_automation_application(user=user) + + workflow_a = data_fixture.create_automation_workflow( + automation=automation, + state=WorkflowState.DRAFT, + create_trigger=False, + ) + trigger_node_a = data_fixture.create_periodic_trigger_node( + workflow=workflow_a, + service_kwargs={ + "interval": PERIODIC_INTERVAL_DAY, + "hour": 12, + "minute": 0, + "last_periodic_run": None, + "next_run_at": None, + }, + ) + + workflow_b1 = data_fixture.create_automation_workflow( + automation=automation, create_trigger=False + ) + trigger_node_b1 = data_fixture.create_periodic_trigger_node( + workflow=workflow_b1, + service_kwargs={ + "interval": PERIODIC_INTERVAL_DAY, + "hour": 12, + "minute": 0, + "last_periodic_run": None, + "next_run_at": None, + }, + ) + workflow_b2 = data_fixture.create_automation_workflow( + state=WorkflowState.LIVE, create_trigger=False + ) + trigger_node_b2 = data_fixture.create_periodic_trigger_node( + workflow=workflow_b2, + service_kwargs={ + "interval": PERIODIC_INTERVAL_DAY, + "hour": 12, + "minute": 0, + "last_periodic_run": datetime(2026, 3, 3, 12, 0, 0), + "next_run_at": datetime(2026, 3, 4, 12, 0, 0), + }, + ) + workflow_b2.automation.published_from = workflow_b1 + workflow_b2.automation.save() + + workflow_c1 = 
data_fixture.create_automation_workflow( + automation=automation, + state=WorkflowState.DRAFT, + create_trigger=False, + ) + trigger_node_c1 = data_fixture.create_periodic_trigger_node( + workflow=workflow_c1, + service_kwargs={ + "interval": PERIODIC_INTERVAL_DAY, + "hour": 9, + "minute": 0, + "last_periodic_run": None, + "next_run_at": None, + }, + ) + + workflow_c2 = data_fixture.create_automation_workflow( + automation=automation, + state=WorkflowState.DRAFT, + create_trigger=False, + ) + trigger_node_c2 = data_fixture.create_periodic_trigger_node( + workflow=workflow_c2, + service_kwargs={ + "interval": PERIODIC_INTERVAL_DAY, + "hour": 9, + "minute": 0, + "last_periodic_run": datetime(2026, 3, 4, 9, 0, 0), + "next_run_at": datetime(2026, 3, 5, 12, 0, 0), + }, + ) + + with freeze_time("2026-03-04 12:00:00"): + current_time = now() + services_due = list( + CorePeriodicServiceType().get_periodic_services_that_are_due(current_time) + ) + service_type_registry.get( + CorePeriodicServiceType.type + ).call_periodic_services_that_are_due() + services_dispatched = CorePeriodicService.objects.filter( + last_periodic_run=now() + ) + + assert services_due == [ + # Due because: it's got no last/next runs, even though it's in draft. + trigger_node_a.service, + # Due because: it's got no last/next runs, even though it's in draft. + trigger_node_b1.service, + # Due because: its next run is now, and it's published. + trigger_node_b2.service, + # Due because: it's got no last/next runs, even though it's in draft. + trigger_node_c1.service, + ] + + # `trigger_node_b2` is the only due service which we've found to be + # appropriate for dispatching (its workflow is live/published). 
+ assert list(services_dispatched) == [trigger_node_b2.service] diff --git a/backend/tests/baserow/contrib/integrations/__init__.py b/backend/tests/baserow/contrib/integrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/baserow/contrib/integrations/core/cases/__init__.py b/backend/tests/baserow/contrib/integrations/core/cases/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/baserow/contrib/integrations/core/cases/core_periodic_service_type.py b/backend/tests/baserow/contrib/integrations/core/cases/core_periodic_service_type.py new file mode 100644 index 0000000000..e45b8a821f --- /dev/null +++ b/backend/tests/baserow/contrib/integrations/core/cases/core_periodic_service_type.py @@ -0,0 +1,565 @@ +from datetime import datetime, timezone + +from baserow.contrib.integrations.core.constants import ( + PERIODIC_INTERVAL_DAY, + PERIODIC_INTERVAL_HOUR, + PERIODIC_INTERVAL_MINUTE, + PERIODIC_INTERVAL_MONTH, + PERIODIC_INTERVAL_WEEK, +) + +CALL_PERIODIC_SERVICES_THAT_ARE_DUE_CASES = [ + # Minute + # Service due exactly now, should trigger + ( + { + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": None, + "next_run_at": datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Service not yet due (1 minute from now) + ( + { + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": datetime(2025, 2, 15, 10, 30, 30, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 2, 15, 10, 31, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Service not yet due (next minute) + ( + { + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 2, 15, 10, 31, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Service overdue from previous minute + ( + { + "interval": PERIODIC_INTERVAL_MINUTE, + 
"last_periodic_run": datetime(2025, 2, 15, 10, 28, 59, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 2, 15, 10, 29, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Service heavily overdue (month old) + ( + { + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": datetime(2025, 1, 16, 2, 59, 59, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 1, 16, 3, 0, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # 5-minute interval not yet due + ( + { + "minute": 5, + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": datetime(2025, 11, 6, 12, 0, 0, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 11, 6, 12, 5, 0, tzinfo=timezone.utc), + }, + "2025-11-06 12:03:00", + False, + ), + # 5-minute interval due exactly now + ( + { + "minute": 5, + "interval": PERIODIC_INTERVAL_MINUTE, + "last_periodic_run": datetime(2025, 11, 6, 12, 0, 0, tzinfo=timezone.utc), + "next_run_at": datetime(2025, 11, 6, 12, 5, 0, tzinfo=timezone.utc), + }, + "2025-11-06 12:05:00", + True, + ), + # Hour + # Hourly service due later this hour + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": None, + "minute": 34, + "next_run_at": datetime(2025, 2, 15, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Hourly service due next hour (already passed this hour) + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": None, + "minute": 34, + "next_run_at": datetime(2025, 2, 15, 11, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:35:45", + False, + ), + # Hourly service not yet due (next hour) + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 15, 10, 5, 45, tzinfo=timezone.utc), + "minute": 5, + "next_run_at": datetime(2025, 2, 15, 11, 5, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Hourly service not yet due (later this hour) + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": 
datetime(2025, 2, 15, 9, 45, 45, tzinfo=timezone.utc), + "minute": 45, + "next_run_at": datetime(2025, 2, 15, 10, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Hourly service overdue from previous hour + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 15, 9, 27, 45, tzinfo=timezone.utc), + "minute": 31, + "next_run_at": datetime(2025, 2, 15, 9, 31, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Hourly service overdue from previous hour + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 15, 9, 27, 45, tzinfo=timezone.utc), + "minute": 29, + "next_run_at": datetime(2025, 2, 15, 9, 29, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Day + # Daily service due later today + ( + { + "interval": PERIODIC_INTERVAL_DAY, + "last_periodic_run": None, + "minute": 34, + "hour": 10, + "next_run_at": datetime(2025, 2, 15, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + False, + ), + # Daily service due tomorrow (passed today's time) + ( + { + "interval": PERIODIC_INTERVAL_DAY, + "last_periodic_run": None, + "minute": 34, + "hour": 10, + "next_run_at": datetime(2025, 2, 16, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:35:45", + False, + ), + # Service overdue from previous day + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 14, 10, 40, 45, tzinfo=timezone.utc), + "minute": 34, + "hour": 10, + "next_run_at": datetime(2025, 2, 14, 11, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Service overdue from previous day + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 14, 9, 45, 45, tzinfo=timezone.utc), + "minute": 45, + "hour": 11, + "next_run_at": datetime(2025, 2, 14, 10, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Service overdue from previous day + ( + { + "interval": 
PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 14, 9, 45, 45, tzinfo=timezone.utc), + "minute": 15, + "hour": 10, + "next_run_at": datetime(2025, 2, 14, 10, 15, 0, tzinfo=timezone.utc), + }, + "2025-02-15 10:30:45", + True, + ), + # Week + # Weekly service due tomorrow + ( + { + "interval": PERIODIC_INTERVAL_WEEK, + "last_periodic_run": None, + "minute": 34, + "hour": 10, + "day_of_week": 1, # Tuesday + "next_run_at": datetime(2025, 2, 11, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-10 10:30:45", + False, + ), + # Weekly service due next week (passed this week's time) + ( + { + "interval": PERIODIC_INTERVAL_WEEK, + "last_periodic_run": None, + "minute": 34, + "hour": 10, + "day_of_week": 1, # Tuesday + "next_run_at": datetime(2025, 2, 18, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:35:45", + False, + ), + # Service overdue from previous week + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 4, 10, 40, 45, tzinfo=timezone.utc), + "minute": 34, + "hour": 10, + "day_of_week": 1, # Tuesday + "next_run_at": datetime(2025, 2, 4, 11, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:30:45", + True, + ), + # Service overdue from previous week + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 4, 9, 45, 45, tzinfo=timezone.utc), + "minute": 45, + "hour": 11, + "day_of_week": 1, # Tuesday + "next_run_at": datetime(2025, 2, 4, 10, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:30:45", + True, + ), + # Service overdue from previous week (by more than an hour) + ( + { + "interval": PERIODIC_INTERVAL_HOUR, + "last_periodic_run": datetime(2025, 2, 4, 9, 45, 45, tzinfo=timezone.utc), + "minute": 45, + "hour": 11, + "day_of_week": 1, # Tuesday + "next_run_at": datetime(2025, 2, 4, 10, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-11 11:46:45", + True, + ), + # Month + # Monthly service due in 2 days + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "last_periodic_run": 
None, + "minute": 34, + "hour": 10, + "day_of_month": 12, + "next_run_at": datetime(2025, 2, 12, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-10 10:30:45", + False, + ), + # Monthly service due next month (passed this month's time) + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "last_periodic_run": None, + "minute": 34, + "hour": 10, + "day_of_month": 11, + "next_run_at": datetime(2025, 3, 11, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:35:45", + False, + ), + # Service overdue from previous month + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "last_periodic_run": datetime(2025, 1, 10, 10, 40, 45, tzinfo=timezone.utc), + "minute": 34, + "hour": 10, + "day_of_month": 11, + "next_run_at": datetime(2025, 1, 11, 10, 34, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:30:45", + True, + ), + # Service overdue from previous month + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "last_periodic_run": datetime(2025, 1, 11, 10, 20, 45, tzinfo=timezone.utc), + "minute": 45, + "hour": 11, + "day_of_month": 11, + "next_run_at": datetime(2025, 1, 11, 11, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-11 10:30:45", + True, + ), + # Service overdue from previous month (by more than an hour) + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "last_periodic_run": datetime(2025, 1, 11, 11, 44, 45, tzinfo=timezone.utc), + "minute": 45, + "hour": 11, + "day_of_month": 11, + "next_run_at": datetime(2025, 1, 11, 11, 45, 0, tzinfo=timezone.utc), + }, + "2025-02-11 11:46:45", + True, + ), +] + +PERIODIC_SERVICE_CALCULATE_NEXT_RUN_CASES = [ + # MINUTE interval tests + ( + PERIODIC_INTERVAL_MINUTE, + 5, # Every 5 minutes + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 10, 35, 0, tzinfo=timezone.utc), + ), + ( + PERIODIC_INTERVAL_MINUTE, + 1, # Every minute + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 10, 31, 0, tzinfo=timezone.utc), + ), + ( + PERIODIC_INTERVAL_MINUTE, + 15, # 
Every 15 minutes + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 45, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 11, 0, 0, tzinfo=timezone.utc), + ), + # HOUR interval tests + ( + PERIODIC_INTERVAL_HOUR, + 25, # At minute 25 of each hour + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 20, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 10, 25, 0, tzinfo=timezone.utc), + ), + ( + PERIODIC_INTERVAL_HOUR, + 25, + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + datetime( + 2025, 2, 15, 11, 25, 0, tzinfo=timezone.utc + ), # Already passed, next hour + ), + ( + PERIODIC_INTERVAL_HOUR, + 0, # On the hour + 0, + 0, + 1, + datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 11, 0, 0, tzinfo=timezone.utc), + ), + # DAY interval tests + ( + PERIODIC_INTERVAL_DAY, + 30, # minute + 14, # hour (14:30) + 0, + 1, + datetime(2025, 2, 15, 10, 0, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 14, 30, 0, tzinfo=timezone.utc), # Today at 14:30 + ), + ( + PERIODIC_INTERVAL_DAY, + 30, + 14, + 0, + 1, + datetime(2025, 2, 15, 15, 0, 0, tzinfo=timezone.utc), + datetime(2025, 2, 16, 14, 30, 0, tzinfo=timezone.utc), # Tomorrow at 14:30 + ), + ( + PERIODIC_INTERVAL_DAY, + 0, + 0, # Midnight + 0, + 1, + datetime(2025, 2, 15, 23, 30, 0, tzinfo=timezone.utc), + datetime(2025, 2, 16, 0, 0, 0, tzinfo=timezone.utc), + ), + # WEEK interval tests + ( + PERIODIC_INTERVAL_WEEK, + 30, # minute + 14, # hour + 1, # Tuesday + 1, + datetime(2025, 2, 10, 10, 0, 0, tzinfo=timezone.utc), # Monday + datetime(2025, 2, 11, 14, 30, 0, tzinfo=timezone.utc), # Next Tuesday + ), + ( + PERIODIC_INTERVAL_WEEK, + 30, + 14, + 1, # Tuesday + 1, + datetime(2025, 2, 11, 10, 0, 0, tzinfo=timezone.utc), # Tuesday morning + datetime(2025, 2, 11, 14, 30, 0, tzinfo=timezone.utc), # Same Tuesday afternoon + ), + ( + PERIODIC_INTERVAL_WEEK, + 30, + 14, + 1, # Tuesday + 1, + datetime(2025, 2, 11, 15, 0, 0, tzinfo=timezone.utc), # Tuesday after 14:30 + datetime(2025, 2, 18, 14, 30, 0, 
tzinfo=timezone.utc), # Next Tuesday + ), + ( + PERIODIC_INTERVAL_WEEK, + 30, + 14, + 0, # Monday + 1, + datetime(2025, 2, 15, 10, 0, 0, tzinfo=timezone.utc), # Saturday + datetime(2025, 2, 17, 14, 30, 0, tzinfo=timezone.utc), # Next Monday + ), + # MONTH interval tests + ( + PERIODIC_INTERVAL_MONTH, + 30, # minute + 14, # hour + 0, + 15, # day 15 + datetime(2025, 2, 10, 10, 0, 0, tzinfo=timezone.utc), + datetime(2025, 2, 15, 14, 30, 0, tzinfo=timezone.utc), # This month on 15th + ), + ( + PERIODIC_INTERVAL_MONTH, + 30, + 14, + 0, + 15, + datetime(2025, 2, 15, 15, 0, 0, tzinfo=timezone.utc), # After 14:30 on 15th + datetime(2025, 3, 15, 14, 30, 0, tzinfo=timezone.utc), # Next month on 15th + ), + ( + PERIODIC_INTERVAL_MONTH, + 30, + 14, + 0, + 31, # Day that doesn't exist in all months + datetime(2025, 1, 31, 15, 0, 0, tzinfo=timezone.utc), # Jan 31st + datetime( + 2025, 2, 28, 14, 30, 0, tzinfo=timezone.utc + ), # Last day of Feb (non-leap) + ), + ( + PERIODIC_INTERVAL_MONTH, + 0, + 0, + 0, + 1, # First of month at midnight + datetime(2025, 2, 15, 10, 0, 0, tzinfo=timezone.utc), + datetime(2025, 3, 1, 0, 0, 0, tzinfo=timezone.utc), + ), +] + +PERIODIC_SERVICE_NEXT_RUN_SET_CASES = [ + # MINUTE - every 5 minutes + ( + {"interval": PERIODIC_INTERVAL_MINUTE, "minute": 5}, + "2025-02-15 10:30:00", + datetime(2025, 2, 15, 10, 35, 0, tzinfo=timezone.utc), + ), + # HOUR - every hour at minute 25 + ( + {"interval": PERIODIC_INTERVAL_HOUR, "minute": 25}, + "2025-02-15 10:20:00", + datetime(2025, 2, 15, 10, 25, 0, tzinfo=timezone.utc), + ), + ( + {"interval": PERIODIC_INTERVAL_HOUR, "minute": 25}, + "2025-02-15 10:30:00", + datetime( + 2025, 2, 15, 11, 25, 0, tzinfo=timezone.utc + ), # Past 10:25, so next hour + ), + # DAY - every day at 14:30 + ( + {"interval": PERIODIC_INTERVAL_DAY, "minute": 30, "hour": 14}, + "2025-02-15 10:00:00", + datetime(2025, 2, 15, 14, 30, 0, tzinfo=timezone.utc), + ), + ( + {"interval": PERIODIC_INTERVAL_DAY, "minute": 30, "hour": 14}, + 
"2025-02-15 15:00:00", + datetime(2025, 2, 16, 14, 30, 0, tzinfo=timezone.utc), # Tomorrow + ), + # WEEK - every Tuesday at 14:30 + ( + { + "interval": PERIODIC_INTERVAL_WEEK, + "minute": 30, + "hour": 14, + "day_of_week": 1, # Tuesday + }, + "2025-02-10 10:00:00", # Monday + datetime(2025, 2, 11, 14, 30, 0, tzinfo=timezone.utc), # Tuesday + ), + # MONTH - 15th of each month at 14:30 + ( + { + "interval": PERIODIC_INTERVAL_MONTH, + "minute": 30, + "hour": 14, + "day_of_month": 15, + }, + "2025-02-10 10:00:00", + datetime(2025, 2, 15, 14, 30, 0, tzinfo=timezone.utc), + ), +] diff --git a/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py b/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py index 14d54d9159..4daa834cad 100644 --- a/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py +++ b/backend/tests/baserow/contrib/integrations/core/test_core_periodic_service_type.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock, call, patch from django.db import transaction -from django.utils.timezone import now import pytest from freezegun import freeze_time @@ -16,17 +15,19 @@ from baserow.contrib.automation.nodes.registries import automation_node_type_registry from baserow.contrib.automation.workflows.constants import WorkflowState from baserow.contrib.integrations.core.constants import ( - PERIODIC_INTERVAL_DAY, PERIODIC_INTERVAL_HOUR, PERIODIC_INTERVAL_MINUTE, - PERIODIC_INTERVAL_MONTH, - PERIODIC_INTERVAL_WEEK, ) from baserow.contrib.integrations.core.models import CorePeriodicService from baserow.contrib.integrations.core.service_types import CorePeriodicServiceType +from baserow.contrib.integrations.core.utils import calculate_next_periodic_run from baserow.core.services.handler import ServiceHandler from baserow.core.services.registries import service_type_registry +from .cases.core_periodic_service_type import ( + CALL_PERIODIC_SERVICES_THAT_ARE_DUE_CASES, +) + 
@pytest.mark.django_db def test_periodic_trigger_service_type_generate_schema(data_fixture): @@ -46,7 +47,10 @@ def test_periodic_trigger_service_type_generate_schema(data_fixture): assert CorePeriodicServiceType().generate_schema(service) == { "title": f"Periodic{service.id}Schema", "type": "object", - "properties": {"triggered_at": {"type": "string", "title": "Triggered at"}}, + "properties": { + "triggered_at": {"type": "string", "title": "Previous scheduled run"}, + "next_run_at": {"type": "string", "title": "Next scheduled run"}, + }, } @@ -58,22 +62,22 @@ def test_periodic_trigger_node_creation_and_property_updates(data_fixture): automation=automation, state=WorkflowState.LIVE, create_trigger=False ) - node_handler = AutomationNodeHandler() - service_handler = ServiceHandler() - node_type = automation_node_type_registry.get(CorePeriodicTriggerNodeType.type) service_type = CorePeriodicServiceType() + node_type = automation_node_type_registry.get(CorePeriodicTriggerNodeType.type) - service = service_handler.create_service( - service_type, - interval=PERIODIC_INTERVAL_MINUTE, - minute=15, - hour=10, - ) - trigger_node = node_handler.create_node( - node_type=node_type, - workflow=workflow, - service=service, - ) + with freeze_time("2025-02-15 10:30:45"): + service = ServiceHandler().create_service( + service_type, + interval=PERIODIC_INTERVAL_MINUTE, + minute=15, + hour=10, + ) + service_type.prepare_values({}, user, service) + trigger_node = AutomationNodeHandler().create_node( + node_type=node_type, + workflow=workflow, + service=service, + ) assert trigger_node.workflow == workflow assert trigger_node.service == service @@ -83,21 +87,29 @@ def test_periodic_trigger_node_creation_and_property_updates(data_fixture): assert service_specific.minute == 15 assert service_specific.hour == 10 assert service_specific.last_periodic_run is None - - updated_service = service_handler.update_service( - service_type=service_type, - service=service, - 
interval=PERIODIC_INTERVAL_HOUR, - minute=30, - hour=14, - day_of_week=2, # Wednesday - ).service + assert service_specific.next_run_at is None + + with freeze_time("2025-02-15 11:00:00"): + updated_service = ( + ServiceHandler() + .update_service( + service_type=service_type, + service=service, + interval=PERIODIC_INTERVAL_HOUR, + minute=30, + hour=14, + day_of_week=2, # Wednesday + ) + .service + ) + service_type.prepare_values({}, user, updated_service) updated_service_specific = updated_service.specific assert updated_service_specific.interval == PERIODIC_INTERVAL_HOUR assert updated_service_specific.minute == 30 assert updated_service_specific.hour == 14 assert updated_service_specific.day_of_week == 2 + assert updated_service_specific.next_run_at is None @pytest.mark.django_db @@ -136,27 +148,26 @@ def test_periodic_service_prepare_values_validates_minute_minimum(data_fixture): @patch( "baserow.contrib.automation.workflows.handler.AutomationWorkflowHandler.start_workflow" ) -def test_call_periodic_services_that_are_not_published( - mock_start_workflow, data_fixture -): +def test_call_periodic_services_in_draft_workflow(mock_start_workflow, data_fixture): user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.DRAFT, create_trigger=False ) + service = data_fixture.create_core_periodic_service( + interval=PERIODIC_INTERVAL_MINUTE, + last_periodic_run=None, + ) data_fixture.create_periodic_trigger_node( workflow=workflow, - service_kwargs={ - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, + service=service, ) with freeze_time("2025-02-15 10:30:45"): with transaction.atomic(): service_type_registry.get( CorePeriodicServiceType.type - ).call_periodic_services_that_are_due(now()) + ).call_periodic_services_that_are_due() mock_start_workflow.delay.assert_not_called() @@ -165,25 +176,26 @@ def 
test_call_periodic_services_that_are_not_published( @patch( "baserow.contrib.automation.workflows.handler.AutomationWorkflowHandler.start_workflow" ) -def test_call_periodic_services_that_are_paused(mock_start_workflow, data_fixture): +def test_call_periodic_services_in_paused_workflow(mock_start_workflow, data_fixture): user = data_fixture.create_user() automation = data_fixture.create_automation_application(user=user) workflow = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.PAUSED, create_trigger=False ) + service = data_fixture.create_core_periodic_service( + interval=PERIODIC_INTERVAL_MINUTE, + last_periodic_run=None, + ) data_fixture.create_periodic_trigger_node( workflow=workflow, - service_kwargs={ - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, + service=service, ) with freeze_time("2025-02-15 10:30:45"): with transaction.atomic(): service_type_registry.get( CorePeriodicServiceType.type - ).call_periodic_services_that_are_due(now()) + ).call_periodic_services_that_are_due() mock_start_workflow.delay.assert_not_called() @@ -198,12 +210,13 @@ def test_call_periodic_services_that_are_locked(mock_start_workflow, data_fixtur workflow = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.LIVE, create_trigger=False ) + service = data_fixture.create_core_periodic_service( + interval=PERIODIC_INTERVAL_MINUTE, + last_periodic_run=None, + ) trigger = data_fixture.create_periodic_trigger_node( workflow=workflow, - service_kwargs={ - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, + service=service, ) with transaction.atomic(using="default-copy"): @@ -215,7 +228,7 @@ def test_call_periodic_services_that_are_locked(mock_start_workflow, data_fixtur with transaction.atomic(): service_type_registry.get( CorePeriodicServiceType.type - ).call_periodic_services_that_are_due(now()) + ).call_periodic_services_that_are_due() 
mock_start_workflow.delay.assert_not_called() @@ -232,39 +245,52 @@ def test_call_multiple_periodic_services_that_are_due( workflow_1 = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.LIVE, create_trigger=False ) - data_fixture.create_periodic_trigger_node( - workflow=workflow_1, - service_kwargs={ - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, - ) workflow_2 = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.LIVE, create_trigger=False ) - data_fixture.create_periodic_trigger_node( - workflow=workflow_2, - service_kwargs={ - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, - ) + + # Create services with next_run_at set to now so they trigger immediately + with freeze_time("2025-02-15 10:30:45"): + service_1 = data_fixture.create_core_periodic_service( + interval=PERIODIC_INTERVAL_MINUTE, + last_periodic_run=None, + next_run_at=datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + ) + data_fixture.create_periodic_trigger_node( + workflow=workflow_1, + service=service_1, + ) + service_2 = data_fixture.create_core_periodic_service( + interval=PERIODIC_INTERVAL_MINUTE, + last_periodic_run=None, + next_run_at=datetime(2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + ) + data_fixture.create_periodic_trigger_node( + workflow=workflow_2, + service=service_2, + ) with freeze_time("2025-02-15 10:30:45"): with transaction.atomic(): service_type_registry.get( CorePeriodicServiceType.type - ).call_periodic_services_that_are_due(now()) + ).call_periodic_services_that_are_due() assert list(mock_async_start_workflow.call_args_list) == unordered( [ call( workflow_1, - {"triggered_at": "2025-02-15T10:30:00+00:00"}, + { + "triggered_at": "2025-02-15T10:30:00+00:00", + "next_run_at": "2025-02-15T10:31:00+00:00", + }, ), call( workflow_2, - {"triggered_at": "2025-02-15T10:30:00+00:00"}, + { + "triggered_at": "2025-02-15T10:30:00+00:00", + "next_run_at": 
"2025-02-15T10:31:00+00:00", + }, ), ] ) @@ -273,376 +299,7 @@ def test_call_multiple_periodic_services_that_are_due( @pytest.mark.django_db(transaction=True) @pytest.mark.parametrize( "service_kwargs,frozen_time,should_be_called", - [ - # Minute - ( - { - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": None, - }, - "2025-02-15 10:30:45", - # never triggered before, so it must always be triggered. - True, - ), - ( - { - # Compat: no `minute` is provided, so it defaults to "1" (every minute). - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 2, 15, 10, 30, 30, tzinfo=timezone.utc - ), - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-2-15-10 30:30 = 15 seconds, so should not be - # triggered. - False, - ), - ( - { - # Compat: no `minute` is provided, so it defaults to "1" (every minute). - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 2, 15, 10, 30, 0, tzinfo=timezone.utc - ), - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-2-15-10 30:00 = 45 seconds, so should not be - # triggered. - False, - ), - ( - { - # Compat: no `minute` is provided, so it defaults to "1" (every minute). - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 2, 15, 10, 28, 59, tzinfo=timezone.utc - ), - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-2-15-10 28:59 = 1 minute 46 seconds, so should - # be triggered. - True, - ), - ( - { - # Compat: no `minute` is provided, so it defaults to "1" (every minute). - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 1, 16, 2, 59, 59, tzinfo=timezone.utc - ), - }, - "2025-02-15 10:30:45", - # Almost a month ago, so it should be triggered. - True, - ), - ( - { - "minute": 5, - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 11, 6, 12, 0, 0, tzinfo=timezone.utc - ), - }, - "2025-11-06 12:03:00", - # It's been 3 minutes, so it should not be triggered. 
- False, - ), - ( - { - "minute": 5, - "interval": PERIODIC_INTERVAL_MINUTE, - "last_periodic_run": datetime( - 2025, 11, 6, 12, 0, 0, tzinfo=timezone.utc - ), - }, - "2025-11-06 12:05:00", - # It's been 5 minutes, so it should be triggered. - True, - ), - # Hour - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": None, - "minute": 34, - }, - "2025-02-15 10:30:45", - # Never triggerd before, but it's not past the 34th minute, - # so not triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": None, - "minute": 34, - }, - "2025-02-15 10:35:45", - # Never triggerd before, but it's not past the 34th minute, - # so not triggered. - True, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 15, 10, 5, 45, tzinfo=timezone.utc - ), - "minute": 5, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-15 10:05:45 = 25 minutes ago, - # so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 15, 9, 45, 45, tzinfo=timezone.utc - ), - "minute": 45, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-15 09:45:30 = 45 minutes ago, - # so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 15, 9, 27, 45, tzinfo=timezone.utc - ), - "minute": 31, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-15 09:27:30 = 1 hour and 3 minutes ago, - # but not yet past the desired minute, so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 15, 9, 27, 45, tzinfo=timezone.utc - ), - "minute": 29, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-15 09:27:30 = 1 hour and 3 minutes ago, - # and past the desired minute, so it should be triggered. 
- True, - ), - # Day - ( - { - "interval": PERIODIC_INTERVAL_DAY, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - }, - "2025-02-15 10:30:45", - # Never triggerd before, but it's not past 11:34, - # so not triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_DAY, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - }, - "2025-02-15 10:35:45", - # Triggered because it was never triggered before, and it's past 11:34. - True, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 14, 10, 40, 45, tzinfo=timezone.utc - ), - "minute": 34, - "hour": 10, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-14 10:40:45 = 23 hours and 10 minutes ago, - # so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 14, 9, 45, 45, tzinfo=timezone.utc - ), - "minute": 45, - "hour": 11, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-14 09:45:45 = 1 day and 1 hout ago, - # but not yet at 11:45, so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 14, 9, 45, 45, tzinfo=timezone.utc - ), - "minute": 15, - "hour": 10, - }, - "2025-02-15 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-14 09:45:45 = 1 day and 1 hour ago, - # and it's 10:15, so it should not be triggered. - True, - ), - # Week - ( - { - "interval": PERIODIC_INTERVAL_WEEK, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - "day_of_week": 1, # Tuesday - }, - "2025-02-10 10:30:45", - # Never triggerd before, but it's not past Tuesday 11:34, - # so not triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_WEEK, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - "day_of_week": 1, # Tuesday - }, - "2025-02-11 10:35:45", - # Triggered because it was never triggered before, and it's past - # Tuesday 11:34. 
- True, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 4, 10, 40, 45, tzinfo=timezone.utc - ), - "minute": 34, - "hour": 10, - "day_of_week": 1, # Tuesday - }, - "2025-02-11 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-04 10:40:45 = 1 week, 23 hours and 10 - # minutes ago, so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 4, 9, 45, 45, tzinfo=timezone.utc - ), - "minute": 45, - "hour": 11, - "day_of_week": 1, # Tuesday - }, - "2025-02-11 10:30:45", - # 2025-02-15 10:30:45 - 2025-02-04 09:45:45 = 1 week and 1 hour ago, - # but not yet at 11:45, so it should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_HOUR, - "last_periodic_run": datetime( - 2025, 2, 4, 9, 45, 45, tzinfo=timezone.utc - ), - "minute": 45, - "hour": 11, - "day_of_week": 1, # Tuesday - }, - "2025-02-11 11:46:45", - # 2025-02-15 10:30:45 - 2025-02-04 09:45:45 = 1 week and 1 hour ago, - # and past 11:46 on Tuesday, so should be triggered. - True, - ), - # Month - ( - { - "interval": PERIODIC_INTERVAL_MONTH, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - "day_of_month": 12, - }, - "2025-02-10 10:30:45", - # Never triggerd before, but it's not past 12th 11:34, - # so not triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_MONTH, - "last_periodic_run": None, - "minute": 34, - "hour": 10, - "day_of_month": 11, - }, - "2025-02-11 10:35:45", - # Triggered because it was never triggered before, and it's past 12th 11:34. - True, - ), - ( - { - "interval": PERIODIC_INTERVAL_MONTH, - "last_periodic_run": datetime( - 2025, 1, 10, 10, 40, 45, tzinfo=timezone.utc - ), - "minute": 34, - "hour": 10, - "day_of_month": 11, - }, - "2025-02-11 10:30:45", - # 2025-02-15 10:30:45 - 2025-01-10 10:40:45 = 1 month, 23 hours and 10 - # minutes ago, so it should not be triggered. 
- False, - ), - ( - { - "interval": PERIODIC_INTERVAL_MONTH, - "last_periodic_run": datetime( - 2025, 1, 11, 10, 20, 45, tzinfo=timezone.utc - ), - "minute": 45, - "hour": 11, - "day_of_month": 11, - }, - "2025-02-11 10:30:45", - # Should not be triggered. - False, - ), - ( - { - "interval": PERIODIC_INTERVAL_MONTH, - "last_periodic_run": datetime( - 2025, 1, 11, 11, 44, 45, tzinfo=timezone.utc - ), - "minute": 45, - "hour": 11, - "day_of_month": 11, - }, - "2025-02-11 11:46:45", - # 2025-02-15 10:30:45 - 2025-01-11 11:44:45 = 1 week and 1 hour ago, - # and past 11:46 on Tuesday, so should be triggered. - True, - ), - ], + CALL_PERIODIC_SERVICES_THAT_ARE_DUE_CASES, ) def test_call_periodic_services_that_are_due( data_fixture, service_kwargs, frozen_time, should_be_called @@ -652,10 +309,13 @@ def test_call_periodic_services_that_are_due( workflow = data_fixture.create_automation_workflow( automation=automation, state=WorkflowState.LIVE, create_trigger=False ) - trigger = data_fixture.create_periodic_trigger_node( - workflow=workflow, - service_kwargs=service_kwargs, - ) + # Create the service at the frozen time so next_run_at is calculated correctly + with freeze_time(frozen_time): + service = data_fixture.create_core_periodic_service(**service_kwargs) + trigger = data_fixture.create_periodic_trigger_node( + workflow=workflow, + service=service, + ) service_type = service_type_registry.get(CorePeriodicServiceType.type) service_type.on_event = MagicMock() @@ -666,16 +326,27 @@ def test_call_periodic_services_that_are_due( def check_service_count(services, event_payload): if should_be_called: - assert services.count() == 1 - assert event_payload == {"triggered_at": target_date.isoformat()} + assert len(services) == 1 + next_run_at = calculate_next_periodic_run( + services[0].interval, + services[0].minute, + services[0].hour, + services[0].day_of_week, + services[0].day_of_month, + ) + service_payload = event_payload(services[0]) + assert service_payload == { + 
"triggered_at": target_date.isoformat(), + "next_run_at": next_run_at.isoformat(), + } else: - assert services.count() == 0 + assert len(services) == 0 service_type.on_event.side_effect = check_service_count with freeze_time(frozen_time): with transaction.atomic(): - service_type.call_periodic_services_that_are_due(now()) + service_type.call_periodic_services_that_are_due() trigger.refresh_from_db() service = trigger.service.specific @@ -683,3 +354,6 @@ def check_service_count(services, event_payload): if should_be_called: assert service.last_periodic_run == target_date + # Verify next_run_at was updated to the next scheduled time + assert service.next_run_at is not None + assert service.next_run_at > target_date diff --git a/backend/tests/baserow/contrib/integrations/core/test_core_utils.py b/backend/tests/baserow/contrib/integrations/core/test_core_utils.py new file mode 100644 index 0000000000..68a20b84ae --- /dev/null +++ b/backend/tests/baserow/contrib/integrations/core/test_core_utils.py @@ -0,0 +1,23 @@ +import pytest + +from baserow.contrib.integrations.core.utils import calculate_next_periodic_run + +from .cases.core_periodic_service_type import PERIODIC_SERVICE_CALCULATE_NEXT_RUN_CASES + + +@pytest.mark.parametrize( + "interval,minute,hour,day_of_week,day_of_month,from_time,expected_next_run", + PERIODIC_SERVICE_CALCULATE_NEXT_RUN_CASES, +) +def test_calculate_next_periodic_run( + interval, minute, hour, day_of_week, day_of_month, from_time, expected_next_run +): + result = calculate_next_periodic_run( + interval=interval, + minute=minute, + hour=hour, + day_of_week=day_of_week, + day_of_month=day_of_month, + from_time=from_time, + ) + assert result == expected_next_run diff --git a/backend/tests/baserow/contrib/integrations/migrations/__init__.py b/backend/tests/baserow/contrib/integrations/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/backend/tests/baserow/contrib/integrations/migrations/test_integrations_migrations.py b/backend/tests/baserow/contrib/integrations/migrations/test_integrations_migrations.py new file mode 100644 index 0000000000..cb4fd6ba59 --- /dev/null +++ b/backend/tests/baserow/contrib/integrations/migrations/test_integrations_migrations.py @@ -0,0 +1,166 @@ +from datetime import datetime, timezone + +from freezegun import freeze_time +import pytest + + +@pytest.mark.once_per_day_in_ci +@freeze_time("2026-02-15 10:30:00") +def test_0026_backfill_coreperiodicservice_next_run_at_forwards( + migrator, teardown_table_metadata +): + migrate_from = [ + ("integrations", "0025_coreperiodicservice_next_run_at"), + ] + migrate_to = [ + ("integrations", "0026_backfill_coreperiodicservice_next_run_at"), + ] + + old_state = migrator.migrate(migrate_from) + + # Get models from old state + CorePeriodicService = old_state.apps.get_model( + "integrations", "CorePeriodicService" + ) + Service = old_state.apps.get_model("core", "Service") + ContentType = old_state.apps.get_model("contenttypes", "ContentType") + + # Create ContentType for CorePeriodicService + service_content_type = ContentType.objects.get_for_model(Service) + + # Create test services with different intervals + # All should have next_run_at=None before migration + # Most have last_periodic_run (realistic scenario), one doesn't + # Current time: 2026-02-15 10:30:00 (Sunday) + services_data = [ + # Every 5 minutes + # Sunday 15th, 10:30 now - DUE NOW (last ran 10:25, interval 5 min, next = 10:30) + { + "id": 1, + "content_type": service_content_type, + "interval": "MINUTE", + "minute": 5, + "last_periodic_run": datetime(2026, 2, 15, 10, 25, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 10, 30, 0, tzinfo=timezone.utc), + }, + # Every hour at minute 45 + # Sunday 15th, 10:30 now - FUTURE RUN (last ran 09:45, next = 10:45) + { + "id": 2, + "content_type": service_content_type, + "interval": "HOUR", + "minute": 
45, + "last_periodic_run": datetime(2026, 2, 15, 9, 45, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 10, 45, 0, tzinfo=timezone.utc), + }, + # Every hour at minute 25 + # Sunday 15th, 10:30 now - OVERDUE (last ran 09:25, next = 10:25 - already passed) + { + "id": 3, + "content_type": service_content_type, + "interval": "HOUR", + "minute": 25, + "last_periodic_run": datetime(2026, 2, 15, 9, 25, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 10, 25, 0, tzinfo=timezone.utc), + }, + # Every day at 14:30 + # Sunday 15th, 10:30 now - FUTURE RUN (last ran yesterday 14:30, next = today 14:30) + { + "id": 4, + "content_type": service_content_type, + "interval": "DAY", + "minute": 30, + "hour": 14, + "last_periodic_run": datetime(2026, 2, 14, 14, 30, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 14, 30, 0, tzinfo=timezone.utc), + }, + # Every day at 09:00 + # Sunday 15th, 10:30 now - OVERDUE (last ran yesterday 09:00, next = today 09:00 - already passed) + { + "id": 5, + "content_type": service_content_type, + "interval": "DAY", + "minute": 0, + "hour": 9, + "last_periodic_run": datetime(2026, 2, 14, 9, 0, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 9, 0, 0, tzinfo=timezone.utc), + }, + # Every Tuesday at 10:00 + # Sunday 15th, 10:30 now - Weekly (last ran Tues Feb 10th, next = this Tues Feb 17th) + { + "id": 6, + "content_type": service_content_type, + "interval": "WEEK", + "minute": 0, + "hour": 10, + "day_of_week": 1, # Tuesday + "last_periodic_run": datetime(2026, 2, 10, 10, 0, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 17, 10, 0, 0, tzinfo=timezone.utc), + }, + # Every month on the 15th at 14:30 + # Sunday 15th, 10:30 now - Monthly (last ran Jan 15, next = today Feb 15 at 14:30) + { + "id": 7, + "content_type": service_content_type, + "interval": "MONTH", + "minute": 30, + "hour": 14, + "day_of_month": 15, + "last_periodic_run": datetime(2026, 1, 15, 
14, 30, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 2, 15, 14, 30, 0, tzinfo=timezone.utc), + }, + # Every month on the 10th at 09:00 + # Sunday 15th, 10:30 now - Monthly (last ran Feb 10, next = Mar 10) + { + "id": 8, + "content_type": service_content_type, + "interval": "MONTH", + "minute": 0, + "hour": 9, + "day_of_month": 10, + "last_periodic_run": datetime(2026, 2, 10, 9, 0, 0, tzinfo=timezone.utc), + "expected_next_run": datetime(2026, 3, 10, 9, 0, 0, tzinfo=timezone.utc), + }, + # Every 5 minutes + # Sunday 15th, 10:30 now - Never run before (no last_periodic_run, calculates from now: 10:30 + 5 min = 10:35) + { + "id": 9, + "content_type": service_content_type, + "interval": "MINUTE", + "minute": 5, + "last_periodic_run": None, + "expected_next_run": datetime(2026, 2, 15, 10, 35, 0, tzinfo=timezone.utc), + }, + ] + + # Create the services (without next_run_at, which should be None) + # Store expected values separately + expected_values = {} + for data in services_data: + service_id = data["id"] + expected_values[service_id] = { + "next_run_at": data.pop("expected_next_run"), + "interval": data["interval"], + } + CorePeriodicService.objects.create(**data) + + # Verify all services have next_run_at=None before migration + assert CorePeriodicService.objects.filter(next_run_at__isnull=True).count() == 9 + + # Run the migration + new_state = migrator.migrate(migrate_to) + NewCorePeriodicService = new_state.apps.get_model( + "integrations", "CorePeriodicService" + ) + + # Verify all services now have next_run_at calculated + assert NewCorePeriodicService.objects.filter(next_run_at__isnull=True).count() == 0 + + # Verify each service has the correct next_run_at + for service_id, expected_data in expected_values.items(): + service = NewCorePeriodicService.objects.get(id=service_id) + expected_next_run = expected_data["next_run_at"] + + assert ( + service.next_run_at == expected_next_run + ), f"Service {service_id} ({expected_data['interval']}): 
expected {expected_next_run}, got {service.next_run_at}" diff --git a/changelog/entries/unreleased/bug/improved_the_scheduling_system_in_the_periodic_service.json b/changelog/entries/unreleased/bug/improved_the_scheduling_system_in_the_periodic_service.json new file mode 100644 index 0000000000..8f896c52d0 --- /dev/null +++ b/changelog/entries/unreleased/bug/improved_the_scheduling_system_in_the_periodic_service.json @@ -0,0 +1,9 @@ +{ + "type": "bug", + "message": "Improved the scheduling system in the periodic service.", + "issue_origin": "github", + "issue_number": null, + "domain": "core", + "bullet_points": [], + "created_at": "2026-02-11" +} \ No newline at end of file diff --git a/web-frontend/package.json b/web-frontend/package.json index b08af13b7c..50db503680 100644 --- a/web-frontend/package.json +++ b/web-frontend/package.json @@ -113,7 +113,7 @@ "tldjs": "^2.3.1", "ulid": "^3.0.1", "vite-plugin-node-polyfills": "^0.24.0", - "vite-svg-loader": "^5.1.0", + "vite-svg-loader": "^5.1.1", "vue": "^3.5.25", "vue-chartjs": "^5.3.3", "vue-router": "^4.6.3", diff --git a/web-frontend/yarn.lock b/web-frontend/yarn.lock index dcf0f1536b..a2dd584b7a 100644 --- a/web-frontend/yarn.lock +++ b/web-frontend/yarn.lock @@ -3462,11 +3462,6 @@ "@tiptap/extension-bubble-menu" "^3.13.0" "@tiptap/extension-floating-menu" "^3.13.0" -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - "@tybys/wasm-util@^0.10.0", "@tybys/wasm-util@^0.10.1": version "0.10.1" resolved "https://registry.yarnpkg.com/@tybys/wasm-util/-/wasm-util-0.10.1.tgz#ecddd3205cf1e2d5274649ff0eedd2991ed7f414" @@ -10182,10 +10177,10 @@ sass@~1.70.0: immutable "^4.0.0" source-map-js ">=0.6.2 <2.0.0" -sax@^1.4.1: - version "1.4.4" - resolved 
"https://registry.yarnpkg.com/sax/-/sax-1.4.4.tgz#f29c2bba80ce5b86f4343b4c2be9f2b96627cf8b" - integrity sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw== +sax@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.5.0.tgz#b5549b671069b7aa392df55ec7574cf411179eb8" + integrity sha512-21IYA3Q5cQf089Z6tgaUTr7lDAyzoTPx5HRtbhsME8Udispad8dC/+sziTNugOEx54ilvatQ9YCzl4KQLPcRHA== scheduler@^0.27.0: version "0.27.0" @@ -10790,23 +10785,23 @@ svg-tags@^1.0.0: resolved "https://registry.yarnpkg.com/svg-tags/-/svg-tags-1.0.0.tgz#58f71cee3bd519b59d4b2a843b6c7de64ac04764" integrity sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA== -svgo@^3.0.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.3.2.tgz#ad58002652dffbb5986fc9716afe52d869ecbda8" - integrity sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw== +svgo@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.3.3.tgz#8246aee0b08791fde3b0ed22b5661b471fadf58e" + integrity sha512-+wn7I4p7YgJhHs38k2TNjy1vCfPIfLIJWR5MnCStsN8WuuTcBnRKcMHQLMM2ijxGZmDoZwNv8ipl5aTTen62ng== dependencies: - "@trysound/sax" "0.2.0" commander "^7.2.0" css-select "^5.1.0" css-tree "^2.3.1" css-what "^6.1.0" csso "^5.0.5" picocolors "^1.0.0" + sax "^1.5.0" svgo@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-4.0.0.tgz#17e0fa2eaccf429e0ec0d2179169abde9ba8ad3d" - integrity sha512-VvrHQ+9uniE+Mvx3+C9IEe/lWasXCU0nXMY2kZeLrHNICuRiC8uMPyM14UEaMOFA5mhyQqEkB02VoQ16n3DLaw== + version "4.0.1" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-4.0.1.tgz#c82dacd04ee9f1d55cd4e0b7f9a214c86670e3ee" + integrity sha512-XDpWUOPC6FEibaLzjfe0ucaV0YrOjYotGJO1WpF0Zd+n6ZGEQUsSugaoLq9QkEZtAfQIxT42UChcssDVPP3+/w== dependencies: commander "^11.1.0" css-select "^5.1.0" @@ -10814,7 +10809,7 @@ svgo@^4.0.0: css-what "^6.1.0" csso "^5.0.5" 
picocolors "^1.1.1" - sax "^1.4.1" + sax "^1.5.0" system-architecture@^0.1.0: version "0.1.0" @@ -11482,12 +11477,13 @@ vite-plugin-vue-tracer@^1.1.3: pathe "^2.0.3" source-map-js "^1.2.1" -vite-svg-loader@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/vite-svg-loader/-/vite-svg-loader-5.1.0.tgz#b0b89bd8024bc0f707d0e8d7422446ac01576d94" - integrity sha512-M/wqwtOEjgb956/+m5ZrYT/Iq6Hax0OakWbokj8+9PXOnB7b/4AxESHieEtnNEy7ZpjsjYW1/5nK8fATQMmRxw== +vite-svg-loader@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/vite-svg-loader/-/vite-svg-loader-5.1.1.tgz#27cbd5e092cc2b2b17ba574cfb2ecf72389e2a3c" + integrity sha512-RPzcXA/EpKJA0585x58DBgs7my2VfeJ+j2j1EoHY4Zh82Y7hV4cR1fElgy2aZi85+QSrcLLoTStQ5uZjD68u+Q== dependencies: - svgo "^3.0.2" + debug "^4.3.4" + svgo "^3.3.3" "vite@^6.0.0 || ^7.0.0", vite@^7.2.2, vite@^7.2.7: version "7.3.1"