From 3da4797a63e9fe8cfd0f932b139300747857bb8b Mon Sep 17 00:00:00 2001 From: Cezary Date: Thu, 5 Feb 2026 09:11:48 +0100 Subject: [PATCH 1/5] chore: ai auto update debounce (#4378) * run ai model values in paralllel #4227 * review fixes * reworked ai threaded execution * review fixes * lint fix after rebase/conflict, removed unused serializer fields * Debounce ai field value generation from auto-update. #4317 * failing test sanity check * review fixes * review fixes * review fix * review fixes * Ensure tasks are re-scheduled on changes during generation * Debug: Add container logs on E2E service startup failure * Move serializer to the correct file * lint fixes * Fix filter empty values * address feedback * Fix flaky test * Fix tests * Add comment for sync=True, as suggested --------- Co-authored-by: Davide Silvestri --- .github/workflows/ci.yml | 15 ++ .../src/baserow/celery_singleton_backend.py | 45 ++++ .../baserow/contrib/database/search/tasks.py | 58 +---- backend/src/baserow/core/jobs/tasks.py | 2 +- .../baserow/core/test_basic_permissions.py | 2 +- ...ounce_ai_field_value_from_auto_update.json | 9 + docs/installation/configuration.md | 1 + .../baserow_premium/api/fields/serializers.py | 12 + premium/backend/src/baserow_premium/apps.py | 2 + .../config/settings/settings.py | 6 + .../src/baserow_premium/fields/field_types.py | 6 +- .../src/baserow_premium/fields/job_types.py | 237 ++++++++++-------- .../src/baserow_premium/fields/models.py | 60 ++++- .../src/baserow_premium/fields/tasks.py | 175 +++++++++++++ .../0031_ai_field_scheduled_update.py | 61 +++++ premium/backend/src/baserow_premium/models.py | 4 +- .../fields/test_ai_field_tasks.py | 111 ++++++++ .../test_generate_ai_values_job_type.py | 20 +- .../test_ai_parallel_execution.py | 23 +- .../field/GenerateAIValuesJobListItem.vue | 5 +- .../field/GenerateAIValuesModal.vue | 3 +- .../modules/baserow_premium/locales/en.json | 3 +- 22 files changed, 664 insertions(+), 196 deletions(-) create mode 100644 
changelog/entries/unreleased/refactor/4317_debounce_ai_field_value_from_auto_update.json create mode 100644 premium/backend/src/baserow_premium/fields/tasks.py create mode 100644 premium/backend/src/baserow_premium/migrations/0031_ai_field_scheduled_update.py create mode 100644 premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b55715e5e2..907f1f1596 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -733,6 +733,21 @@ jobs: cd e2e-tests ./wait-for-services.sh + - name: Show container logs on failure + if: failure() + run: | + echo "=== Backend container logs ===" + docker logs backend 2>&1 | tail -500 || true + echo "" + echo "=== Celery container logs ===" + docker logs celery 2>&1 | tail -200 || true + echo "" + echo "=== Web-frontend container logs ===" + docker logs web-frontend 2>&1 | tail -200 || true + echo "" + echo "=== Container status ===" + docker ps -a + - name: Run E2E tests (shard ${{ matrix.shard }}) env: PUBLIC_BACKEND_URL: http://localhost:8000 diff --git a/backend/src/baserow/celery_singleton_backend.py b/backend/src/baserow/celery_singleton_backend.py index 7d3393060d..48733d9c54 100644 --- a/backend/src/baserow/celery_singleton_backend.py +++ b/backend/src/baserow/celery_singleton_backend.py @@ -1,3 +1,6 @@ +from django.conf import settings +from django.core.cache import cache + from celery_singleton.backends import RedisBackend from django_redis import get_redis_connection @@ -9,3 +12,45 @@ def __init__(self, *args, **kwargs): """ self.redis = get_redis_connection("default") + + +class SingletonAutoRescheduleFlag: + """ + Flag is used to indicate that a task of this type is pending reschedule. + + When the task ends, if this flag is set, it will re-schedule itself to + ensure that task is eventually run. + """ + + def __init__(self, key: str): + self.key = key + + def is_set(self) -> bool: + """ + Checks if the flag is set. 
+ + :return: True if the lock is set, False otherwise. + """ + + return cache.get(key=self.key) or False + + def set(self) -> bool: + """ + Sets the flag for the task, indicating it needs to be rescheduled. + + :return: True if the flag was set, False if it was already set. + """ + + return cache.set( + key=self.key, + value=True, + timeout=settings.AUTO_INDEX_LOCK_EXPIRY * 2, + ) + + def clear(self) -> bool: + """ + Clears the flag for the task. + :return: True if the flag was cleared, False otherwise. + """ + + return cache.delete(key=self.key) diff --git a/backend/src/baserow/contrib/database/search/tasks.py b/backend/src/baserow/contrib/database/search/tasks.py index 1e7c552ee3..ac6a5d0404 100644 --- a/backend/src/baserow/contrib/database/search/tasks.py +++ b/backend/src/baserow/contrib/database/search/tasks.py @@ -2,13 +2,13 @@ from typing import List, Optional from django.conf import settings -from django.core.cache import cache from django.db.models import Q from celery_singleton import DuplicateTaskError, Singleton from django_cte import With from loguru import logger +from baserow.celery_singleton_backend import SingletonAutoRescheduleFlag from baserow.config.celery import app from baserow.contrib.database.search.models import PendingSearchValueUpdate from baserow.contrib.database.table.exceptions import TableDoesNotExist @@ -17,53 +17,8 @@ PERIODIC_CHECK_TIME_LIMIT = 60 * PERIODIC_CHECK_MINUTES # 15 minutes. -class PendingSearchUpdateFlag: - """ - Flag is used to indicate that a search data update task is pending for a - specific table and it has not been possible to schedule it yet due to a concurrent - task already running for the same table. - - When the task ends, if this flag is set, it will re-schedule itself to ensure that - the search data is eventually updated. - """ - - def __init__(self, table_id: int): - self.table_id = table_id - - @property - def key(self): - """ - Returns the cache key to use for the table lock. 
- """ - - return f"database_search_data_lock_{self.table_id}" - - def get(self): - """ - Gets the lock for the search data update task. - - :return: True if the lock is set, False otherwise. - """ - - return cache.get(key=self.key) - - def set(self): - """ - Sets the lock for the search data update task. - """ - - return cache.set( - key=self.key, - value=True, - timeout=settings.AUTO_INDEX_LOCK_EXPIRY * 2, - ) - - def clear(self): - """ - Clears the lock for the search data update task. - """ - - return cache.delete(key=self.key) +def _get_singleton_autoreschedule_flag(table_id: int) -> SingletonAutoRescheduleFlag: + return SingletonAutoRescheduleFlag(f"database_search_data_lock_{table_id}") @app.task(queue="export") @@ -114,7 +69,8 @@ def schedule_update_search_data( # There are new updates pending to be processed, make sure the flag is set # so the task will be re-scheduled at the end of the current run. if new_pending_updates: - PendingSearchUpdateFlag(table_id).set() + flag = _get_singleton_autoreschedule_flag(table_id) + flag.set() @app.task( @@ -162,13 +118,13 @@ def update_search_data(table_id: int): SearchHandler.initialize_missing_search_data(table) # Make sure newer updates will re-schedule this task at the end if needed. - flag = PendingSearchUpdateFlag(table_id) + flag = _get_singleton_autoreschedule_flag(table_id) flag.clear() SearchHandler.process_search_data_updates(table) # If new updates were queued during processing, schedule another update - if flag.get(): + if flag.is_set(): logger.debug( f"New updates detected, rescheduling the task for table {table_id}." 
) diff --git a/backend/src/baserow/core/jobs/tasks.py b/backend/src/baserow/core/jobs/tasks.py index 2a277c1764..e1587dbadc 100644 --- a/backend/src/baserow/core/jobs/tasks.py +++ b/backend/src/baserow/core/jobs/tasks.py @@ -46,7 +46,7 @@ def run_async_job(self, job_id: int): job.set_state_cancelled() job.save() except BaseException as e: - # We also want to catch SystemExit exception here and all other possible + # BaseException allows catching SystemExit exceptions and all other possible # exceptions to set the job state in a failed state. error = f"Something went wrong during the {job_type.type} job execution." diff --git a/backend/tests/baserow/core/test_basic_permissions.py b/backend/tests/baserow/core/test_basic_permissions.py index 14c1c6c2e5..4efacf7e8e 100755 --- a/backend/tests/baserow/core/test_basic_permissions.py +++ b/backend/tests/baserow/core/test_basic_permissions.py @@ -243,7 +243,7 @@ def test_workspace_member_permission_manager(data_fixture, django_assert_num_que perm_manager.check_permissions( user, ListApplicationsWorkspaceOperationType.type, workspace_2, workspace_2 ) - except Exception: # noqa:W0718 + except Exception: # noqa ... 
with django_assert_num_queries(0): diff --git a/changelog/entries/unreleased/refactor/4317_debounce_ai_field_value_from_auto_update.json b/changelog/entries/unreleased/refactor/4317_debounce_ai_field_value_from_auto_update.json new file mode 100644 index 0000000000..48d0d14943 --- /dev/null +++ b/changelog/entries/unreleased/refactor/4317_debounce_ai_field_value_from_auto_update.json @@ -0,0 +1,9 @@ +{ + "type": "refactor", + "message": "Debounce AI field value generation, that has been triggered from auto update.", + "issue_origin": "github", + "issue_number": 4317, + "domain": "database", + "bullet_points": [], + "created_at": "2025-12-03" +} diff --git a/docs/installation/configuration.md b/docs/installation/configuration.md index 6a3dc402de..aa96df71f0 100644 --- a/docs/installation/configuration.md +++ b/docs/installation/configuration.md @@ -150,6 +150,7 @@ The installation methods referred to in the variable descriptions are: | BASEROW\_OLLAMA\_HOST | Provide an OLLAMA host to allow using OLLAMA for generative AI features like the AI field. | | | BASEROW\_OLLAMA\_MODELS | Provide a comma separated list of Ollama models (https://ollama.com/library) that you would like to enable in the instance (e.g. `llama2`). Note that this only works if an Ollama host is set. If this variable is not provided, the user won't be able to choose a model. | | | BASEROW\_AI\_FIELD\_MAX\_CONCURRENT\_GENERATIONS | If AI field values are recalculated in a large number (i.e. recalculating whole table, empty rows, or a selection of rows), this controls the number of concurrent requests issued to AI model to generate values. | 5 | +| BASEROW\_AI\_FIELD\_AUTO\_UPDATE\_DEBOUNCE\_TIME | Debounce time in seconds for AI field updates scheduled from auto-update feature. If AI field has auto-update feature enabled, and many changes occur on fields that are referenced by that AI field, this will delay AI field generation by a number of seconds to accumulate many short updates into one bigger. 
| 3 | ### Backend Misc Configuration | Name | Description | Defaults | diff --git a/premium/backend/src/baserow_premium/api/fields/serializers.py b/premium/backend/src/baserow_premium/api/fields/serializers.py index 5e73c90a84..07fffccdbe 100644 --- a/premium/backend/src/baserow_premium/api/fields/serializers.py +++ b/premium/backend/src/baserow_premium/api/fields/serializers.py @@ -37,3 +37,15 @@ class GenerateFormulaWithAIRequestSerializer(serializers.Serializer): class GenerateFormulaWithAIResponseSerializer(serializers.Serializer): formula = serializers.CharField(help_text="The formula generated by the AI.") + + +class GenerateAIValuesJobFiltersSerializer(serializers.Serializer): + """ + Adds the ability to filter GenerateAIValuesJob by AI field ID. + """ + + generate_ai_values_field_id = serializers.IntegerField( + min_value=1, + required=False, + help_text="Filter by the AI field ID.", + ) diff --git a/premium/backend/src/baserow_premium/apps.py b/premium/backend/src/baserow_premium/apps.py index 26e87deb5a..b42a16ce10 100644 --- a/premium/backend/src/baserow_premium/apps.py +++ b/premium/backend/src/baserow_premium/apps.py @@ -263,3 +263,5 @@ def ready(self): ) widget_type_registry.register(ChartWidgetType()) widget_type_registry.register(PieChartWidgetType()) + + from baserow_premium.fields import tasks # noqa: F401 diff --git a/premium/backend/src/baserow_premium/config/settings/settings.py b/premium/backend/src/baserow_premium/config/settings/settings.py index 8066f325b2..6e02c3492f 100644 --- a/premium/backend/src/baserow_premium/config/settings/settings.py +++ b/premium/backend/src/baserow_premium/config/settings/settings.py @@ -40,3 +40,9 @@ def setup(settings): settings.BASEROW_AI_FIELD_MAX_CONCURRENT_GENERATIONS = try_int( os.getenv("BASEROW_AI_FIELD_MAX_CONCURRENT_GENERATIONS"), 5 ) + + # Debounce time for AI field generation, if changes are triggered from + # auto-update feature. In seconds. 
+ settings.BASEROW_AI_FIELD_AUTO_UPDATE_DEBOUNCE_TIME = try_int( + os.getenv("BASEROW_AI_FIELD_AUTO_UPDATE_DEBOUNCE_TIME"), 3 + ) diff --git a/premium/backend/src/baserow_premium/fields/field_types.py b/premium/backend/src/baserow_premium/fields/field_types.py index 6b917feb77..20b0c2bf82 100644 --- a/premium/backend/src/baserow_premium/fields/field_types.py +++ b/premium/backend/src/baserow_premium/fields/field_types.py @@ -35,11 +35,11 @@ GenerativeAIWithFilesModelType, generative_ai_model_type_registry, ) -from baserow.core.jobs.handler import JobHandler from baserow_premium.api.fields.exceptions import ( ERROR_GENERATIVE_AI_DOES_NOT_SUPPORT_FILE_FIELD, ) from baserow_premium.fields.exceptions import GenerativeAITypeDoesNotSupportFileField +from baserow_premium.fields.tasks import schedule_ai_field_generation from baserow_premium.license.features import PREMIUM from baserow_premium.license.handler import LicenseHandler @@ -392,8 +392,8 @@ def _handle_dependent_rows_change( row_ids = [starting_row.id] transaction.on_commit( - lambda: JobHandler().create_and_start_job( - user, "generate_ai_values", field_id=field.id, row_ids=row_ids + lambda: schedule_ai_field_generation.delay( + field_id=field.id, row_ids=row_ids ) ) diff --git a/premium/backend/src/baserow_premium/fields/job_types.py b/premium/backend/src/baserow_premium/fields/job_types.py index eba4395a5c..458d1114f7 100644 --- a/premium/backend/src/baserow_premium/fields/job_types.py +++ b/premium/backend/src/baserow_premium/fields/job_types.py @@ -1,10 +1,11 @@ -from collections.abc import Iterator +from collections.abc import Callable, Iterator from concurrent.futures import Executor, ThreadPoolExecutor +from datetime import datetime, timezone from queue import Empty, Queue -from typing import Any, Type +from typing import Any, NamedTuple, Type from django.contrib.auth.models import AbstractUser -from django.db.models import QuerySet +from django.db.models import Exists, OuterRef, QuerySet from loguru 
import logger from rest_framework import serializers @@ -39,23 +40,18 @@ from baserow.core.job_types import _empty_transaction_context from baserow.core.jobs.exceptions import MaxJobCountExceeded from baserow.core.jobs.registries import JobType -from baserow.core.utils import ChildProgressBuilder, Progress +from baserow.core.utils import ChildProgressBuilder from baserow_premium.generative_ai.managers import AIFileManager -from .models import AIField, GenerateAIValuesJob +from .models import AIField, AIFieldScheduledUpdate, GenerateAIValuesJob from .registries import ai_field_output_registry -class GenerateAIValuesJobFiltersSerializer(serializers.Serializer): - """ - Adds the ability to filter GenerateAIValuesJob by AI field ID. - """ - - generate_ai_values_field_id = serializers.IntegerField( - min_value=1, - required=False, - help_text="Filter by the AI field ID.", - ) +class AIValueUpdate(NamedTuple): + row: Type[GeneratedTableModel] + result: Any | Exception + start_at: datetime + end_at: datetime def get_valid_generative_ai_model_type_or_raise(ai_field: AIField): @@ -95,6 +91,7 @@ class GenerateAIValuesJobType(JobType): "row_ids", "view_id", "only_empty", + "is_auto_update", ] serializer_field_overrides = { "field_id": serializers.IntegerField( @@ -116,6 +113,12 @@ class GenerateAIValuesJobType(JobType): help_text="Whether to only generate AI values for rows where the " "field is empty.", ), + "is_auto_update": serializers.BooleanField( + required=False, + read_only=True, + help_text="Indicates if the job has been created because values in a " + "dependent field changed.", + ), } def can_schedule_or_raise(self, job: GenerateAIValuesJob): @@ -129,7 +132,7 @@ def can_schedule_or_raise(self, job: GenerateAIValuesJob): """ # No limits when specific row IDs are provided - if job.row_ids: + if job.row_ids or job.is_auto_update: return running_jobs = ( @@ -222,10 +225,17 @@ def prepare_values(self, values, user): # Create the job instance without saving it yet, so we 
can use its mode property unsaved_job = GenerateAIValuesJob(**values) + prepared_values = { + "field_id": ai_field.id, + } get_valid_generative_ai_model_type_or_raise(ai_field) - if unsaved_job.mode == GenerateAIValuesJob.MODES.ROWS: + if unsaved_job.mode == GenerateAIValuesJob.MODES.AUTO_UPDATE: + if not AIFieldScheduledUpdate.objects.filter(field_id=ai_field.id).exists(): + raise ValueError("No rows scheduled for AI field auto update.") + prepared_values["is_auto_update"] = True + elif unsaved_job.mode == GenerateAIValuesJob.MODES.ROWS: found_rows_ids = ( RowHandler().get_rows(model, req_row_ids).values_list("id", flat=True) ) @@ -233,11 +243,15 @@ def prepare_values(self, values, user): raise RowDoesNotExist( sorted(list(set(req_row_ids) - set(found_rows_ids))) ) + prepared_values["row_ids"] = req_row_ids elif unsaved_job.mode == GenerateAIValuesJob.MODES.VIEW: # Ensure the view exists in the table ViewHandler().get_view_as_user(user, view_id, table_id=ai_field.table.id) + prepared_values["view_id"] = view_id + + prepared_values["only_empty"] = values.get("only_empty", False) - return values + return prepared_values def get_filters_serializer(self) -> Type[serializers.Serializer] | None: """ @@ -246,15 +260,20 @@ def get_filters_serializer(self) -> Type[serializers.Serializer] | None: :return: A serializer class extending JobTypeFiltersSerializer. 
""" + from baserow_premium.api.fields.serializers import ( + GenerateAIValuesJobFiltersSerializer, + ) + return GenerateAIValuesJobFiltersSerializer def run(self, job: GenerateAIValuesJob, progress): user = job.user + ai_field = self._get_field(job.field_id) table = ai_field.table workspace = table.database.workspace model = table.get_model() - + row_handler = RowHandler() CoreHandler().check_permissions( job.user, ListFieldsOperationType.type, @@ -266,9 +285,17 @@ def run(self, job: GenerateAIValuesJob, progress): rows = self._get_view_queryset(user, job.view_id, table.id) elif job.mode == GenerateAIValuesJob.MODES.TABLE: rows = model.objects.all() - elif job.mode == GenerateAIValuesJob.MODES.ROWS: + elif job.mode == GenerateAIValuesJob.MODES.AUTO_UPDATE: + rows = model.objects.filter( + Exists( + AIFieldScheduledUpdate.objects.filter( + field_id=ai_field.id, row_id=OuterRef("id") + ) + ) + ) + elif job.mode in {GenerateAIValuesJob.MODES.ROWS}: req_row_ids = job.row_ids - rows = RowHandler().get_rows(model, req_row_ids) + rows = row_handler.get_rows(model, req_row_ids) else: raise ValueError(f"Unknown mode {job.mode} for GenerateAIValuesJob") @@ -280,7 +307,65 @@ def run(self, job: GenerateAIValuesJob, progress): ) rows_progress = ChildProgressBuilder.build(progress_builder, rows.count()) - generator = AIValueGenerator(user, ai_field, self, rows_progress) + + def on_progress(value_update: AIValueUpdate): + """ + Called when a row has been processed, and a result from AI model has been + retrieved. + + This is called from AIValueGenerator, to inform that a row has been + processed, and there's a specific result of that processing. If the value + is an exception, that means the processing ended with an error. + + :param result: AIValueResult object with the row, result and timing + information. 
+ """ + + from baserow_premium.fields.tasks import ( + _schedule_generate_ai_value_generation, + ) + + rows_progress.increment() + row = value_update.row + start_at = value_update.start_at + + if isinstance(value_update.result, Exception): + rows_ai_values_generation_error.send( + self, + user=user, + rows=[row], + field=ai_field, + table=table, + error_message=str(value_update.result), + ) + return + + if job.is_auto_update: + deleted_count, _ = AIFieldScheduledUpdate.objects.filter( + field_id=ai_field.id, row_id=row.id, updated_on__lte=start_at + ).delete() + # The scheduled update was removed or updated after the job + # started, so we skip updating this row with an already outdated + # value, and we renschedule generation for it. + if deleted_count == 0: + _schedule_generate_ai_value_generation(field_id=ai_field.id) + return + + try: + row_handler.update_row_by_id( + user, + table, + row.id, + {ai_field.db_column: value_update.result}, + model=model, + values_already_prepared=True, + ) + except RowDoesNotExist: + # The row was trahsed during the generation and we cannot update + # it, so we skip it. + return + + generator = AIValueGenerator(user, ai_field, self, on_progress) generator.process(rows.order_by("id")) @@ -305,7 +390,7 @@ def __init__( user: AbstractUser, ai_field: AIField, signal_sender: GenerateAIValuesJob | Any | None = None, - progress: Progress | None = None, + on_progress: Callable[[AIValueUpdate], None] | None = None, ): self.user = user @@ -331,10 +416,10 @@ def __init__( self.results_queue = Queue(self.max_concurrency) # Marker to keep track if any errors ocurred during the process. 
- self.has_errors = False + self.error_msg = None self.row_handler = RowHandler() - self.progress = progress + self.on_progress = on_progress self.prepare() @@ -374,10 +459,6 @@ def prepare(self): ) ) - # FIXME: manually set the websocket_id to None for now because the frontend - # needs to receive the update to stop the loading state - self.user.web_socket_id = None - def generate_value_for(self, row: GeneratedTableModel): """ Runs value generation for a single row using AI model. @@ -389,24 +470,19 @@ def generate_value_for(self, row: GeneratedTableModel): :param row: A row to generate value for. """ + start = datetime.now(tz=timezone.utc) try: result = self._generate_value_for(row) - + end = datetime.now(tz=timezone.utc) self.results_queue.put( - ( - row, - result, - ), + AIValueUpdate(row, result, start, end), block=True, ) except Exception as e: logger.opt(exception=e).error(f"Value generation for row {row} failed: {e}") - + end = datetime.now(tz=timezone.utc) self.results_queue.put( - ( - row, - e, - ), + AIValueUpdate(row, e, start, end), block=True, ) @@ -465,7 +541,7 @@ def _generate_value_for(self, row: GeneratedTableModel) -> Any: value = ai_output_type.parse_output(value, ai_field) return value - def handle_error(self, row: GeneratedTableModel, exc: Exception): + def handle_error(self, error_message: str): """ Error handling routine, if an error occurred during getting AI model response for a row. @@ -473,38 +549,11 @@ def handle_error(self, row: GeneratedTableModel, exc: Exception): If an error occurs, this will stop processing any pending rows and will notify the frontend on a first occurrence of an error. - :param row: A row on which the error occurred. - :param exc: The exception that occurred. - :return: + :param error_message: The exception message to log and send with the signal. 
""" self.stop_scheduling_rows() - - if not self.has_errors: - rows_ai_values_generation_error.send( - self, - user=self.user, - rows=[row], - field=self.ai_field, - table=self.table, - error_message=str(exc), - ) - - self.has_errors = True - - def update_value(self, row: GeneratedTableModel, value: Any): - """ - Updates AI field value for the row with the value returned from the AI model. - """ - - self.row_handler.update_row_by_id( - self.user, - self.table, - row.id, - {self.ai_field.db_column: value}, - model=self.model, - values_already_prepared=True, - ) + self.error_msg = error_message def raise_if_error(self): """ @@ -515,8 +564,10 @@ def raise_if_error(self): there was an error. """ - if self.has_errors: - raise GenerativeAIPromptError(f"AI model responded with errors.") + if self.error_msg: + raise GenerativeAIPromptError( + f"AI model responded with errors: {self.error_msg}" + ) def process(self, rows: QuerySet[GeneratedTableModel]): """ @@ -556,9 +607,8 @@ def process(self, rows: QuerySet[GeneratedTableModel]): self.stop_scheduling_rows() try: - processed = self.results_queue.get(block=True, timeout=0.1) - row, result = processed - self.handle_result(row, result) + processed = self.results_queue.get(block=True, timeout=0.01) + self.handle_result(processed) # Queue is empty, no processed results available yet; continue polling. except Empty: @@ -593,41 +643,29 @@ def is_finished(self) -> bool: return not len(self.in_process) and not self.generate_more_rows - def handle_result(self, row: GeneratedTableModel, result: Exception | Any): + def handle_result(self, result: AIValueUpdate): """ An entry point to handle the result value for a row. The result may be an error or a correct result, so, depending on its type, it will be handled differently. - A correct value will be stored for the row. + This will update a local state and pass the result to a callback, so the caller + can decide how to handle the result. 
- The error will be stored and a signal may be emitted, so the frontend will - know about the error. This will also stop processing new rows. - - In any case, we want to update internal progress state. - - :param row: The row for which result arrived. - :param result: The result from the AI model. - :return: + :param result: An AIValueResult object with the result. """ - try: - if isinstance(result, Exception): - self.handle_error(row, result) - else: - self.update_value(row, result) - finally: - self.update_progress(row) - - def update_progress(self, row: GeneratedTableModel): - """ - Update internal progress state. - """ + if isinstance(result.result, Exception): + exc = result.result + self.handle_error(str(exc)) self.finished += 1 - self.in_process.remove(row.id) - if self.progress: - self.progress.increment() + self.in_process.remove(result.row.id) + if self.on_progress: + try: + self.on_progress(result) + except Exception as exc: + self.handle_error(str(exc)) def schedule_next_row(self, rows_iter: Iterator, executor: Executor): """ @@ -635,6 +673,5 @@ def schedule_next_row(self, rows_iter: Iterator, executor: Executor): """ row = next(rows_iter) - executor.submit(self.generate_value_for, row) self.in_process.add(row.id) diff --git a/premium/backend/src/baserow_premium/fields/models.py b/premium/backend/src/baserow_premium/fields/models.py index 3ecb7f899f..372b64d97c 100644 --- a/premium/backend/src/baserow_premium/fields/models.py +++ b/premium/backend/src/baserow_premium/fields/models.py @@ -4,15 +4,13 @@ from django.contrib.auth import get_user_model from django.contrib.postgres.fields import ArrayField from django.db import models +from django.db.models import UniqueConstraint from baserow.contrib.database.fields.models import Field from baserow.core.formula.field import FormulaField as ModelFormulaField -from baserow.core.jobs.mixins import ( - JobWithUndoRedoIds, - JobWithUserIpAddress, - JobWithWebsocketId, -) +from baserow.core.jobs.mixins import 
JobWithUndoRedoIds, JobWithUserIpAddress from baserow.core.jobs.models import Job +from baserow.core.mixins import BigAutoFieldMixin from .ai_field_output_types import TextAIFieldOutputType from .registries import ai_field_output_registry @@ -68,13 +66,12 @@ def ai_max_concurrent_generations(self) -> int: return settings.BASEROW_AI_FIELD_MAX_CONCURRENT_GENERATIONS -class GenerateAIValuesJob( - JobWithUserIpAddress, JobWithWebsocketId, JobWithUndoRedoIds, Job -): +class GenerateAIValuesJob(JobWithUserIpAddress, JobWithUndoRedoIds, Job): class MODES(StrEnum): ROWS = "rows" VIEW = "view" TABLE = "table" + AUTO_UPDATE = "auto_update" field = models.ForeignKey( Field, @@ -93,12 +90,55 @@ class MODES(StrEnum): only_empty = models.BooleanField( default=False, help_text="Whether to only generate values for empty cells." ) + is_auto_update = models.BooleanField( + null=True, + db_default=False, + default=False, + help_text="If set, the job has been scheduled as a result of AI field auto-update.", + ) + + # TODO: no longer needed. Remove in a feature release + user_websocket_id = models.CharField( + max_length=36, + null=True, + help_text="The user websocket uuid needed to manage signals sent correctly.", + ) @property - def mode(self): - if self.row_ids is not None: + def mode(self) -> MODES: + if self.is_auto_update: + return self.MODES.AUTO_UPDATE + elif self.row_ids is not None: return self.MODES.ROWS elif self.view_id is not None: return self.MODES.VIEW else: # Without filters, generate the values for the whole table return self.MODES.TABLE + + +class AIFieldScheduledUpdate(BigAutoFieldMixin, models.Model): + """ + Stores information about scheduled AI field updates. + + Part of debouncing infrastructure. + """ + + field_id = models.IntegerField(help_text="The ID of the field to update.") + row_id = models.IntegerField(help_text="Row ID to update") + updated_on = models.DateTimeField( + help_text="The time this update was last modified." 
+ ) + + class Meta: + constraints = [ + UniqueConstraint( + fields=["field_id", "row_id"], name="ai_field_id_row_id_uniq" + ) + ] + indexes = [ + # speeds up filtering of old values + models.Index( + name="ai_field_updated_on_idx", + fields=["field_id", "-updated_on"], + ) + ] diff --git a/premium/backend/src/baserow_premium/fields/tasks.py b/premium/backend/src/baserow_premium/fields/tasks.py new file mode 100644 index 0000000000..a298e28bed --- /dev/null +++ b/premium/backend/src/baserow_premium/fields/tasks.py @@ -0,0 +1,175 @@ +from datetime import datetime, timedelta, timezone + +from django.conf import settings +from django.db.models import Exists, OuterRef, Q + +from celery_singleton import DuplicateTaskError, Singleton + +from baserow.celery_singleton_backend import SingletonAutoRescheduleFlag +from baserow.config.celery import app +from baserow.core.jobs.handler import JobHandler +from baserow_premium.fields.job_types import GenerateAIValuesJobType +from baserow_premium.fields.models import AIField, AIFieldScheduledUpdate +from baserow_premium.license.features import PREMIUM +from baserow_premium.license.handler import LicenseHandler + +PERIODIC_CHECK_MINUTES = 5 +PERIODIC_CHECK_TIME_LIMIT = 60 * PERIODIC_CHECK_MINUTES + + +def _get_singleton_autoreschedule_flag(field_id: int) -> SingletonAutoRescheduleFlag: + return SingletonAutoRescheduleFlag(f"ai_field_generation_lock_{field_id}") + + +def _schedule_generate_ai_value_generation(field_id: int): + """ + Actually schedules AI value generation task. + + :param field_id: AI field id. 
+ """ + + try: + generate_scheduled_ai_field_generation.s(field_id=field_id).apply_async( + countdown=settings.BASEROW_AI_FIELD_AUTO_UPDATE_DEBOUNCE_TIME + ) + except DuplicateTaskError: + flag = _get_singleton_autoreschedule_flag(field_id) + flag.set() + + +@app.task( + queue="export", + base=Singleton, + unique_on="field_id", + raise_on_duplicate=True, + lock_expiry=settings.CELERY_SEARCH_UPDATE_HARD_TIME_LIMIT, + soft_time_limit=settings.CELERY_SEARCH_UPDATE_HARD_TIME_LIMIT, + time_limit=settings.CELERY_SEARCH_UPDATE_HARD_TIME_LIMIT, +) +def generate_scheduled_ai_field_generation(field_id: int): + """ + Generates AI field values for rows that have been scheduled for update from AI + field auto-update feature. + + This is essentially a wrapper around calling `generate_ai_values` with proper + parameters. This task is a per-field singleton, but also a job, so it can be + cancelled. + + The job is executed with `is_auto_update` flag, meaning that when the job runs, it + will only process rows that were scheduled for update. + + If the job fails without processing all rows, the remaining scheduled rows will be + still present in the scheduling table, and processed by another task run. + + :param field_id: AI field id. + """ + + jh = JobHandler() + + # Ensure the field still exists and auto-update is still enabled. If not, + # disabling the auto-update ensures all updates will be removed the next time + # the periodic task runs. 
+ try: + field = AIField.objects.select_related( + "ai_auto_update_user", "table__database__workspace" + ).get(id=field_id) + except AIField.DoesNotExist: + return + + user = field.ai_auto_update_user + if not user or not LicenseHandler.user_has_feature( + PREMIUM, user, field.table.database.workspace + ): + field.ai_auto_update = False + field.ai_auto_update_user = None + field.save() + return + + flag = _get_singleton_autoreschedule_flag(field_id) + flag.clear() + + # Synchronously run the job while keeping the singleton lock, to avoid + # multiple concurrent job runs for the same field. + jh.create_and_start_job( + user, + GenerateAIValuesJobType.type, + field_id=field_id, + is_auto_update=True, + sync=True, + ) + + if flag.is_set(): + _schedule_generate_ai_value_generation(field_id) + + +@app.task() +def schedule_ai_field_generation(field_id: int, row_ids: list[int] | None = None): + """ + Populates scheduled rows table for AI field generation. + + If there's no row ids provided, it will just schedule a task. If a row was already + scheduled, its `updated_on` timestamp will be updated. + + :param field_id: AI field id. + :param row_ids: a list of row ids to be updated. + """ + + now = datetime.now(tz=timezone.utc) + if row_ids: + AIFieldScheduledUpdate.objects.bulk_create( + [ + AIFieldScheduledUpdate(field_id=field_id, row_id=row_id, updated_on=now) + for row_id in row_ids + ], + update_conflicts=True, + unique_fields=["field_id", "row_id"], + update_fields=["updated_on"], + batch_size=1000, + ) + + _schedule_generate_ai_value_generation(field_id) + + +@app.task( + queue="export", + base=Singleton, + raise_on_duplicate=False, + soft_time_limit=PERIODIC_CHECK_TIME_LIMIT, + time_limit=PERIODIC_CHECK_TIME_LIMIT, + lock_expiry=PERIODIC_CHECK_TIME_LIMIT, +) +def periodic_reschedule_old_ai_field_generation(): + """ + Removes old rows scheduled for AI field auto-update, and schedules a generation + task, if there are rows remaining to process. 
+ """ + + cutoff = datetime.now(tz=timezone.utc) - timedelta( + hours=settings.HOURS_UNTIL_TRASH_PERMANENTLY_DELETED + ) + + # Delete any old scheduled rows where the associated field no longer exists + # or the auto_update is disabled + AIFieldScheduledUpdate.objects.filter( + Q(updated_on__lte=cutoff) + | ~Exists( + AIField.objects.filter( + id=OuterRef("field_id"), + ai_auto_update=True, + ai_auto_update_user__isnull=False, + ) + ) + ).delete() + + for field_id in AIFieldScheduledUpdate.objects.distinct("field_id").values_list( + "field_id", flat=True + ): + _schedule_generate_ai_value_generation(field_id) + + +@app.on_after_finalize.connect +def setup_periodic_tasks(sender, **kwargs): + sender.add_periodic_task( + timedelta(minutes=PERIODIC_CHECK_MINUTES), + periodic_reschedule_old_ai_field_generation.s(), + ) diff --git a/premium/backend/src/baserow_premium/migrations/0031_ai_field_scheduled_update.py b/premium/backend/src/baserow_premium/migrations/0031_ai_field_scheduled_update.py new file mode 100644 index 0000000000..6bc3fc2192 --- /dev/null +++ b/premium/backend/src/baserow_premium/migrations/0031_ai_field_scheduled_update.py @@ -0,0 +1,61 @@ +# Generated by Django 5.0.14 on 2025-12-22 18:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("baserow_premium", "0030_generateaivaluesjob"), + ] + + operations = [ + migrations.AddField( + model_name="generateaivaluesjob", + name="is_auto_update", + field=models.BooleanField( + db_default=False, + default=False, + help_text="If set, the job has been scheduled as a result of AI field auto-update.", + null=True, + ), + ), + migrations.CreateModel( + name="AIFieldScheduledUpdate", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "field_id", + models.IntegerField(help_text="The ID of the field to update."), + ), + ("row_id", models.IntegerField(help_text="Row ID 
to update")), + ( + "updated_on", + models.DateTimeField( + help_text="The time this update was last modified." + ), + ), + ], + options={ + "indexes": [ + models.Index( + fields=["field_id", "-updated_on"], + name="ai_field_updated_on_idx", + ) + ], + }, + ), + migrations.AddConstraint( + model_name="aifieldscheduledupdate", + constraint=models.UniqueConstraint( + fields=("field_id", "row_id"), name="ai_field_id_row_id_uniq" + ), + ), + ] diff --git a/premium/backend/src/baserow_premium/models.py b/premium/backend/src/baserow_premium/models.py index 962e52bd00..fd4e8eed9c 100644 --- a/premium/backend/src/baserow_premium/models.py +++ b/premium/backend/src/baserow_premium/models.py @@ -1,5 +1,5 @@ -from .fields.models import AIField +from .fields.models import AIField, AIFieldScheduledUpdate from .license.models import License, LicenseUser from .row_comments.models import RowComment -__all__ = ["License", "LicenseUser", "RowComment", "AIField"] +__all__ = ["License", "LicenseUser", "RowComment", "AIField", "AIFieldScheduledUpdate"] diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py new file mode 100644 index 0000000000..214a3441bd --- /dev/null +++ b/premium/backend/tests/baserow_premium_tests/fields/test_ai_field_tasks.py @@ -0,0 +1,111 @@ +from unittest.mock import patch + +from django.test.utils import override_settings + +import pytest + +from baserow.contrib.database.fields.handler import FieldHandler +from baserow.contrib.database.rows.handler import RowHandler +from baserow_premium.fields.models import AIFieldScheduledUpdate +from baserow_premium.fields.tasks import schedule_ai_field_generation + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.field_ai +def test_ai_field_schedule_update_rows_task(): + with patch( + "baserow_premium.fields.tasks._schedule_generate_ai_value_generation" + ) as mock_generate_ai_type: + 
schedule_ai_field_generation(field_id=1, row_ids=[1, 2, 3, 4]) + schedule_ai_field_generation(field_id=1, row_ids=[3, 4, 5]) + + assert mock_generate_ai_type.call_count == 2 + + scheduled = list(AIFieldScheduledUpdate.objects.all()) + assert len(scheduled) == 5 + assert [r.row_id for r in scheduled] == [1, 2, 3, 4, 5] + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.field_ai +def test_ai_field_schedule_update_no_rows_task(): + """ + Test if empty rows list will schedule generation task anyway + """ + + with patch( + "baserow_premium.fields.tasks._schedule_generate_ai_value_generation" + ) as mock_generate_ai_type: + schedule_ai_field_generation(field_id=1, row_ids=[]) + + assert mock_generate_ai_type.call_count == 1 + + assert AIFieldScheduledUpdate.objects.count() == 0 + + +@pytest.mark.django_db(transaction=True) +@pytest.mark.field_ai +@override_settings(DEBUG=True) +def test_generate_ai_field_from_scheduled_rows(premium_data_fixture): + """ + Test if the ai value generation task will pick up scheduled rows. 
+ """ + + premium_data_fixture.register_fake_generate_ai_type() + user = premium_data_fixture.create_user( + email="test@test.nl", + password="password", + first_name="Test1", + has_active_premium_license=True, + ) + + database = premium_data_fixture.create_database_application( + user=user, name="database" + ) + table = premium_data_fixture.create_database_table(name="table", database=database) + text_field = premium_data_fixture.create_text_field(table=table, name="text") + ai_field = FieldHandler().create_field( + table=table, + user=user, + name="ai", + type_name="ai", + ai_generative_ai_type="test_generative_ai", + ai_generative_ai_model="test_1", + ai_prompt=f"get('fields.field_{text_field.id}')", + ai_temperature=0.7, + ) + + rows = ( + RowHandler() + .create_rows( + user, + table, + rows_values=[ + {text_field.db_column: "row 1"}, + {text_field.db_column: "row 2"}, + ], + send_webhook_events=False, + send_realtime_update=False, + ) + .created_rows + ) + + assert AIFieldScheduledUpdate.objects.count() == 0 + row_ids = [r.id for r in rows] + + ai_field = FieldHandler().update_field( + user=user, field=ai_field, ai_auto_update=True + ) + + assert ai_field.ai_auto_update_user == user + + schedule_ai_field_generation(field_id=ai_field.id, row_ids=row_ids) + + assert AIFieldScheduledUpdate.objects.count() == 0 + + for row in rows: + row.refresh_from_db() + assert ( + getattr(row, ai_field.db_column) + == f"Generated with temperature 0.7: row {row.id}" + ) diff --git a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py index 1d6664ebcf..4a85489828 100644 --- a/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py +++ b/premium/backend/tests/baserow_premium_tests/fields/test_generate_ai_values_job_type.py @@ -828,16 +828,12 @@ def test_generate_ai_field_value_auto_update( ai_auto_update=True, ) - rows = ( - 
RowHandler() - .create_rows( - user, - table, - rows_values=[{text_field.db_column: "test"}], - send_webhook_events=False, - send_realtime_update=False, - ) - .created_rows + RowHandler().create_rows( + user, + table, + rows_values=[{text_field.db_column: "test"}], + send_webhook_events=False, + send_realtime_update=False, ) assert patched_job_creation.call_count == 1 @@ -846,8 +842,8 @@ def test_generate_ai_field_value_auto_update( # Verify job was created with correct parameters assert call_args.args[0] == user assert call_args.args[1] == "generate_ai_values" + assert call_args.kwargs["is_auto_update"] is True assert call_args.kwargs["field_id"] == ai_field.id - assert call_args.kwargs["row_ids"] == [r.id for r in rows] @pytest.mark.django_db(transaction=True) @@ -940,6 +936,8 @@ def test_generate_ai_field_no_user_task_executed(premium_data_fixture): ) .created_rows ) + assert ai_field.ai_auto_update_user == user + assert ai_field.ai_auto_update row = rows[0] row.refresh_from_db() diff --git a/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py b/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py index 8cf793c592..38407f9963 100644 --- a/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py +++ b/premium/backend/tests/baserow_premium_tests/generative_ai/test_ai_parallel_execution.py @@ -1,4 +1,5 @@ from io import BytesIO +from unittest import mock import pytest @@ -6,7 +7,6 @@ from baserow.core.generative_ai.exceptions import GenerativeAIPromptError from baserow.core.storage import get_default_storage from baserow.core.user_files.handler import UserFileHandler -from baserow.core.utils import Progress from baserow_premium.fields.job_types import AIValueGenerator @@ -49,18 +49,15 @@ def test_ai_parallel_execution(premium_data_fixture): rows = table_model.objects.all() - progress = Progress(len(rows)) - gen = AIValueGenerator( - user=user, - ai_field=ai_field, 
- progress=progress, - ) + on_progress_mock = mock.Mock() + + gen = AIValueGenerator(user=user, ai_field=ai_field, on_progress=on_progress_mock) gen.process(rows.order_by("id")) assert len(rows) == 30 assert gen.finished == len(rows) - assert not gen.has_errors - assert progress.progress == 30 + assert gen.error_msg is None + assert on_progress_mock.called @pytest.mark.django_db @@ -108,11 +105,11 @@ def test_ai_parallel_execution_with_error(premium_data_fixture): rows = table_model.objects.all() - progress = Progress(len(rows)) + on_progress_mock = mock.Mock() gen = AIValueGenerator( user=user, ai_field=ai_field, - progress=progress, + on_progress=on_progress_mock, ) with pytest.raises(GenerativeAIPromptError): @@ -120,5 +117,5 @@ def test_ai_parallel_execution_with_error(premium_data_fixture): assert len(rows) == 30 assert gen.finished == 5 - assert gen.has_errors - assert progress.progress == 5 + assert gen.error_msg is not None + assert on_progress_mock.called diff --git a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue index 39ab9cf829..367bd22536 100644 --- a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue +++ b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesJobListItem.vue @@ -68,8 +68,9 @@ export default { }, jobName() { let name = '' - - if (this.jobItem.view_id) { + if (this.jobItem.is_auto_update) { + name = this.$t('generateAIValuesModal.autoUpdate') + } else if (this.jobItem.view_id) { const view = this.views.find((v) => v.id === this.jobItem.view_id) if (view) { name = this.$t('generateAIValuesModal.view', { name: view.name }) diff --git a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue index 
977b2a7b67..20bc9793f7 100644 --- a/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue +++ b/premium/web-frontend/modules/baserow_premium/components/field/GenerateAIValuesModal.vue @@ -116,7 +116,8 @@ export default { return ( job.type === GenerateAIValuesJobType.getType() && job.field_id === this.field.id && - job.row_ids === null + job.row_ids === null && + job.is_auto_update === false ) } ) diff --git a/premium/web-frontend/modules/baserow_premium/locales/en.json b/premium/web-frontend/modules/baserow_premium/locales/en.json index 2323fc9a48..1f08208a75 100644 --- a/premium/web-frontend/modules/baserow_premium/locales/en.json +++ b/premium/web-frontend/modules/baserow_premium/locales/en.json @@ -385,7 +385,8 @@ "view": "View: {name}", "table": "Table: All rows", "deletedView": "View: ID {viewId} (deleted)", - "rows": "Rows: {count} rows" + "rows": "Rows: {count} rows", + "autoUpdate": "Auto update: referenced fields changed" }, "generateAIValuesForm": { "scopeLabel": "Scope", From 1f825fa644123edc49a6be4da41f1a5492ef6d86 Mon Sep 17 00:00:00 2001 From: Jonathan Adeline Date: Thu, 5 Feb 2026 13:33:14 +0400 Subject: [PATCH 2/5] feat: encapsulate ESLint configuration in a reusable factory function with an `extraSourceFiles` option. 
(#4658) --- eslint.config.mjs | 147 ++++++++++++++++++++++++---------------------- 1 file changed, 77 insertions(+), 70 deletions(-) diff --git a/eslint.config.mjs b/eslint.config.mjs index 12bd1dd0ec..758ee95e33 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -1,77 +1,84 @@ // @ts-check -import withNuxt from './web-frontend/.nuxt/eslint.config.mjs' -import globals from './web-frontend/node_modules/globals/index.js' -import vitest from './web-frontend/node_modules/eslint-plugin-vitest/dist/index.mjs' -import eslintConfigPrettier from './web-frontend/node_modules/eslint-config-prettier/index.js' +import withNuxt from "./web-frontend/.nuxt/eslint.config.mjs"; +import globals from "./web-frontend/node_modules/globals/index.js"; +import vitest from "./web-frontend/node_modules/eslint-plugin-vitest/dist/index.mjs"; +import eslintConfigPrettier from "./web-frontend/node_modules/eslint-config-prettier/index.js"; -export default withNuxt([ - { - ignores: [ - '**/node_modules/**', - '**/.nuxt/**', - '**/coverage/**', - '**/generated/**', - '**/.nuxt-storybook/**', - '**/dist/**', - '**/.output/**', - '**/.storybook/**', - '**/vitest.setup.ts', - ], - }, - eslintConfigPrettier, // deactivate eslint rules that conflict with prettier - { - files: [ - 'web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}', - 'premium/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}', - 'enterprise/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}', - ], - languageOptions: { - globals: { - ...globals.browser, - ...globals.node, - }, - }, - rules: { - 'no-console': 0, - 'vue/no-mutating-props': 0, - 'import/order': 'off', - 'vue/html-self-closing': 'off', - 'vue/no-unused-components': 'warn', - 'vue/no-use-computed-property-like-method': 'off', - 'vue/multi-word-component-names': 'off', - 'vue/no-reserved-component-names': 'off', - 'import/no-named-as-default-member': 'off', - 'no-unused-vars': 'off', - '@typescript-eslint/no-unused-vars': 'off', - '@typescript-eslint/no-explicit-any': 'off', - 
'@typescript-eslint/no-dynamic-delete': 'off', - 'no-empty': 'off', +// Export factory function for reusability in plugins +export const createBaserowConfig = ({ extraSourceFiles = [] } = {}) => { + return withNuxt([ + { + ignores: [ + "**/node_modules/**", + "**/.nuxt/**", + "**/coverage/**", + "**/generated/**", + "**/.nuxt-storybook/**", + "**/dist/**", + "**/.output/**", + "**/.storybook/**", + "**/vitest.setup.ts", + ], }, - }, - // Premium and Enterprise-specific overrides - { - files: [ - 'premium/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}', - 'enterprise/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}', - ], - rules: { - 'vue/order-in-components': 'off', + eslintConfigPrettier, // deactivate eslint rules that conflict with prettier + { + files: [ + "web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}", + "premium/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}", + "enterprise/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}", + ...extraSourceFiles, + ], + languageOptions: { + globals: { + ...globals.browser, + ...globals.node, + }, + }, + rules: { + "no-console": 0, + "vue/no-mutating-props": 0, + "import/order": "off", + "vue/html-self-closing": "off", + "vue/no-unused-components": "warn", + "vue/no-use-computed-property-like-method": "off", + "vue/multi-word-component-names": "off", + "vue/no-reserved-component-names": "off", + "import/no-named-as-default-member": "off", + "no-unused-vars": "off", + "@typescript-eslint/no-unused-vars": "off", + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-dynamic-delete": "off", + "no-empty": "off", + }, }, - }, - // Test files configuration - { - files: [ - '**/*.{test,spec}.{js,ts,jsx,tsx}', - '**/__tests__/**/*.{js,ts,jsx,tsx}', - ], - plugins: { vitest }, - languageOptions: { - globals: { - ...vitest.environments.env.globals, + // Plugin specific overrides + { + files: [ + "premium/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}", + "enterprise/web-frontend/**/*.{js,ts,mjs,mts,jsx,tsx,vue}", + 
...extraSourceFiles, // Apply these rules to plugins too + ], + rules: { + "vue/order-in-components": "off", }, }, - rules: { - ...vitest.configs.recommended.rules, + // Test files configuration + { + files: [ + "**/*.{test,spec}.{js,ts,jsx,tsx}", + "**/__tests__/**/*.{js,ts,jsx,tsx}", + ], + plugins: { vitest }, + languageOptions: { + globals: { + ...vitest.environments.env.globals, + }, + }, + rules: { + ...vitest.configs.recommended.rules, + }, }, - }, -]) + ]); +}; + +export default createBaserowConfig(); From f218113dd15bf09f6f33efb4e6361e302a707898 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Petr=20St=C5=99=C3=ADbn=C3=BD?= Date: Thu, 5 Feb 2026 11:04:46 +0100 Subject: [PATCH 3/5] Keep CONN_MAX_AGE=0 for all connections (#4655) --- backend/src/baserow/config/settings/base.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/src/baserow/config/settings/base.py b/backend/src/baserow/config/settings/base.py index 51f591875f..e9fd2f6e44 100644 --- a/backend/src/baserow/config/settings/base.py +++ b/backend/src/baserow/config/settings/base.py @@ -218,9 +218,7 @@ # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases if "DATABASE_URL" in os.environ: - DATABASES = { - "default": dj_database_url.parse(os.getenv("DATABASE_URL"), conn_max_age=600) - } + DATABASES = {"default": dj_database_url.parse(os.getenv("DATABASE_URL"))} else: DATABASES = { "default": { @@ -246,7 +244,7 @@ if key.startswith("DATABASE_READ_REPLICA_") and key.endswith("_URL"): suffix = key[len("DATABASE_READ_REPLICA_") : -len("_URL")] db_key = f"read_{suffix}" - DATABASES[db_key] = dj_database_url.parse(value, conn_max_age=600) + DATABASES[db_key] = dj_database_url.parse(value) DATABASE_READ_REPLICAS.append(db_key) elif key.startswith("DATABASE_READ_") and key.endswith("_NAME"): suffix = key[len("DATABASE_READ_") : -len("_NAME")] From 9dcd35aa4634a8b271f66d753fb996d7486e41cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Feb 2026 11:31:15 +0100 Subject: [PATCH 4/5] chore (deps): bump pytest-split from 0.10.0 to 0.11.0 in /backend (#4642) Bumps [pytest-split](https://github.com/jerry-git/pytest-split) from 0.10.0 to 0.11.0. - [Release notes](https://github.com/jerry-git/pytest-split/releases) - [Changelog](https://github.com/jerry-git/pytest-split/blob/master/CHANGELOG.md) - [Commits](https://github.com/jerry-git/pytest-split/compare/0.10.0...0.11.0) --- updated-dependencies: - dependency-name: pytest-split dependency-version: 0.11.0 dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/pyproject.toml | 2 +- backend/uv.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 22262abde1..209dbe574b 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -138,7 +138,7 @@ dev = [ "openapi-spec-validator==0.7.2", "pytest-html==4.1.1", "coverage==7.13.1", - "pytest-split==0.10.0", + "pytest-split==0.11.0", "pytest-unordered==0.7.0", "debugpy==1.8.20", "backports.cached-property==1.0.2", diff --git a/backend/uv.lock b/backend/uv.lock index a87ee7a986..13352cad65 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -477,7 +477,7 @@ dev = [ { name = "pytest-mock", specifier = "==3.15.1" }, { name = "pytest-ordering", specifier = "==0.6" }, { name = "pytest-retry", specifier = "==1.7.0" }, - { name = "pytest-split", specifier = "==0.10.0" }, + { name = "pytest-split", specifier = "==0.11.0" }, { name = "pytest-testmon", specifier = "==2.2.0" }, { name = "pytest-unordered", specifier = "==0.7.0" }, { name = "pytest-xdist", specifier = "==3.8.0" }, @@ -3091,14 +3091,14 @@ wheels = [ [[package]] name = "pytest-split" -version = "0.10.0" +version = "0.11.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/d7/e30ba44adf83f15aee3f636daea54efadf735769edc0f0a7d98163f61038/pytest_split-0.10.0.tar.gz", hash = "sha256:adf80ba9fef7be89500d571e705b4f963dfa05038edf35e4925817e6b34ea66f", size = 13903, upload-time = "2024-10-16T15:45:19.783Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/16/8af4c5f2ceb3640bb1f78dfdf5c184556b10dfe9369feaaad7ff1c13f329/pytest_split-0.11.0.tar.gz", hash = "sha256:8ebdb29cc72cc962e8eb1ec07db1eeb98ab25e215ed8e3216f6b9fc7ce0ec2b5", size = 13421, upload-time = "2026-02-03T09:14:31.469Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/a7/cad88e9c1109a5c2a320d608daa32e5ee008ccbc766310f54b1cd6b3d69c/pytest_split-0.10.0-py3-none-any.whl", hash = "sha256:466096b086a7147bcd423c6e6c2e57fc62af1c5ea2e256b4ed50fc030fc3dddc", size = 11961, upload-time = "2024-10-16T15:45:18.289Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a1/d4423657caaa8be9b31e491592b49cebdcfd434d3e74512ce71f6ec39905/pytest_split-0.11.0-py3-none-any.whl", hash = "sha256:899d7c0f5730da91e2daf283860eb73b503259cb416851a65599368849c7f382", size = 11911, upload-time = "2026-02-03T09:14:33.708Z" }, ] [[package]] From f6b46c9231eb16540f8eae8b137895d52d299539 Mon Sep 17 00:00:00 2001 From: Peter Evans Date: Thu, 5 Feb 2026 12:12:02 +0000 Subject: [PATCH 5/5] CustomStyleButton needs to use modelValue in vue3, not value. 
(#4659) --- .../elements/components/forms/style/CustomStyleButton.vue | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web-frontend/modules/builder/components/elements/components/forms/style/CustomStyleButton.vue b/web-frontend/modules/builder/components/elements/components/forms/style/CustomStyleButton.vue index 8831dd69c6..f3e3388558 100644 --- a/web-frontend/modules/builder/components/elements/components/forms/style/CustomStyleButton.vue +++ b/web-frontend/modules/builder/components/elements/components/forms/style/CustomStyleButton.vue @@ -24,7 +24,7 @@ export default { name: 'CustomStyle', inject: ['openCustomStyleForm'], props: { - value: { + modelValue: { type: Object, required: false, default: () => undefined, @@ -66,7 +66,7 @@ export default { extraArgs: this.extraArgs, onStylesChanged: this.onStylesChanged, configBlockTypes: this.configBlockTypes, - defaultStyleValues: this.value?.[this.styleKey], + defaultStyleValues: this.modelValue?.[this.styleKey], }) }, },