diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml
index ab8da98054..cd09b51aaf 100644
--- a/.github/workflows/node.yaml
+++ b/.github/workflows/node.yaml
@@ -37,16 +37,16 @@ jobs:
           yarn config set cacheFolder /home/runner/lint-core-cache
           yarn
+
+          # setup zodern:types. No linters are set up, so this simply installs the packages
+          yarn meteor lint
+          yarn build:packages
         env:
           CI: true
       - name: Run typecheck and linter
         run: |
           cd meteor
-
-          # setup zodern:types. No linters are setup, so this simply installs the packages
-          meteor lint
-
           yarn ci:lint
         env:
           CI: true
@@ -78,16 +78,16 @@ jobs:
           yarn config set cacheFolder /home/runner/test-core-cache
           yarn
+
+          # setup zodern:types. No linters are set up, so this simply installs the packages
+          yarn meteor lint
+          yarn build:packages
         env:
           CI: true
       - name: Run Tests
         run: |
           cd meteor
-
-          # setup zodern:types. No linters are setup, so this simply installs the packages
-          meteor lint
-
           NODE_OPTIONS="--max-old-space-size=6144" yarn unitci --force-exit
         env:
           CI: true
@@ -106,68 +106,13 @@ jobs:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
-      - name: Determine if images should be published to DockerHub
-        id: dockerhub
-        run: |
-          # check if a release branch, or main, or a tag
-          if [[ "${{ github.ref }}" =~ ^refs/heads/release([0-9]+)$ || "${{ github.ref }}" == "refs/heads/main" || "${{ github.ref }}" == refs/tags/* ]]
-          then
-            DOCKERHUB_PUBLISH="1"
-          else
-            DOCKERHUB_PUBLISH="0"
-          fi
-          # debug output
-          echo "dockerhub-publish $DOCKERHUB_PUBLISH"
-          echo "dockerhub-publish=$DOCKERHUB_PUBLISH" >> $GITHUB_OUTPUT
-      - name: Check if push to GHCR is enabled
-        id: check-ghcr
-        env:
-          GHCR_ENABLED: ${{ secrets.GHCR_ENABLED }}
-        run: |
-          echo "Enable push to GHCR: ${{ env.GHCR_ENABLED != '' }}"
-          echo "enable=${{ env.GHCR_ENABLED != '' }}" >> $GITHUB_OUTPUT
-      - name: Check if there is access to repo secrets (needed for build and push)
-        if: steps.dockerhub.outputs.dockerhub-publish == '1' || steps.check-ghcr.outputs.enable == 'true'
-        id: check-build-and-push
-        env:
-          SECRET_ACCESS: ${{ secrets.DOCKERHUB_IMAGE_PREFIX }}
-        run: |
-          echo "Enable build and push: ${{ env.SECRET_ACCESS != '' }}"
-          echo "enable=${{ env.SECRET_ACCESS != '' }}" >> $GITHUB_OUTPUT
-      - name: Get the Docker tag for GHCR
-        id: ghcr-tag
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        uses: docker/metadata-action@v5
-        with:
-          images: |
-            ghcr.io/${{ github.repository }}-server-core
-          tags: |
-            type=schedule
-            type=ref,event=branch
-            type=ref,event=tag
-            type=raw,value=latest,enable={{is_default_branch}}
-      - name: Get the Docker tag for DockerHub
-        id: dockerhub-tag
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        uses: docker/metadata-action@v5
-        with:
-          images: |
-            ${{ secrets.DOCKERHUB_IMAGE_PREFIX }}server-core
-          tags: |
-            type=schedule
-            type=ref,event=branch
-            type=ref,event=tag
-            type=raw,value=latest,enable={{is_default_branch}}
      - name: Use Node.js
-        if: steps.check-build-and-push.outputs.enable == 'true'
        uses: actions/setup-node@v6
        with:
          node-version-file: ".node-version"
      - uses: ./.github/actions/setup-meteor
-        if: steps.check-build-and-push.outputs.enable == 'true'
      - name: restore node_modules
        uses: actions/cache@v4
-        if: steps.check-build-and-push.outputs.enable == 'true'
        with:
          path: |
            node_modules
@@ -175,46 +120,94 @@ jobs:
            packages/node_modules
          key: ${{ runner.os }}-${{ hashFiles('yarn.lock', 'meteor/yarn.lock', 'meteor/.meteor/release', 'packages/yarn.lock') }}
      - name: Prepare Environment
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          corepack enable
          yarn install
+
+          # setup zodern:types. No linters are set up, so this simply installs the packages
+          yarn meteor lint
+
      - name: Build libs
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          yarn build:packages
+      - name: Build webui
+        run: |
+          cd packages/webui
+          yarn build
      - name: Persist Built Version information
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          cd meteor
          yarn inject-git-hash
      - name: Prepare webui for meteor build
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          rm -Rf meteor/public
          cp -R packages/webui/dist meteor/public
      - name: Meteor Build
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          cd meteor
          NODE_OPTIONS="--max-old-space-size=4096" METEOR_DEBUG_BUILD=1 meteor build --allow-superuser --directory .
          mv bundle/programs/web.browser/assets/ bundle/programs/web.browser/app/assets/ || true
-
      - name: Meteor Bundle NPM Build
-        if: steps.check-build-and-push.outputs.enable == 'true'
        run: |
          cd meteor/bundle/programs/server
          meteor npm install
      - name: Set up Docker Buildx
-        if: steps.check-build-and-push.outputs.enable == 'true'
        uses: docker/setup-buildx-action@v3
-      - name: Login to DockerHub
-        if: steps.check-build-and-push.outputs.enable == 'true' && steps.dockerhub.outputs.dockerhub-publish == '1'
-        uses: docker/login-action@v3
+
+      # Check how the image should be built and pushed
+      - name: Determine if images should be published to DockerHub
+        id: dockerhub
+        run: |
+          # check if a release branch, or main, or a tag
+          if [[ "${{ github.ref }}" =~ ^refs/heads/release([0-9]+)$ || "${{ github.ref }}" == "refs/heads/main" || "${{ github.ref }}" == refs/tags/* ]]
+          then
+            DOCKERHUB_PUBLISH="1"
+          else
+            DOCKERHUB_PUBLISH="0"
+          fi
+          # debug output
+          echo "dockerhub-publish $DOCKERHUB_PUBLISH"
+          echo "dockerhub-publish=$DOCKERHUB_PUBLISH" >> $GITHUB_OUTPUT
+      - name: Check if push to GHCR is enabled
+        id: check-ghcr
+        env:
+          GHCR_ENABLED: ${{ secrets.GHCR_ENABLED }}
+        run: |
+          echo "Enable push to GHCR: ${{ env.GHCR_ENABLED != '' }}"
+          echo "enable=${{ env.GHCR_ENABLED != '' }}" >> $GITHUB_OUTPUT
+      - name: Check if there is access to repo secrets (needed for build and push)
+        if: steps.dockerhub.outputs.dockerhub-publish == '1' || steps.check-ghcr.outputs.enable == 'true'
+        id: check-build-and-push
+        env:
+          SECRET_ACCESS: ${{ secrets.DOCKERHUB_IMAGE_PREFIX }}
+        run: |
+          echo "Enable build and push: ${{ env.SECRET_ACCESS != '' }}"
+          echo "enable=${{ env.SECRET_ACCESS != '' }}" >> $GITHUB_OUTPUT
+
+      # No-push build if no destination
+      - name: Build without push
+        if: steps.check-build-and-push.outputs.enable != 'true'
+        uses: docker/build-push-action@v6
        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          context: .
+          file: ./meteor/Dockerfile.circle
+          push: false
+          provenance: false
+
+      # GHCR build
+      - name: Get the Docker tag for GHCR
+        id: ghcr-tag
+        if: steps.check-build-and-push.outputs.enable == 'true'
+        uses: docker/metadata-action@v5
+        with:
+          images: |
+            ghcr.io/${{ github.repository }}-server-core
+          tags: |
+            type=schedule
+            type=ref,event=branch
+            type=ref,event=tag
+            type=raw,value=latest,enable={{is_default_branch}}
      - name: Login to GitHub Container Registry
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true'
        uses: docker/login-action@v3
@@ -233,6 +226,26 @@ jobs:
          labels: ${{ steps.ghcr-tag.outputs.labels }}
          tags: "${{ steps.ghcr-tag.outputs.tags }}"
          github-token: ${{ github.token }}
+
+      # DockerHub push
+      - name: Get the Docker tag for DockerHub
+        id: dockerhub-tag
+        if: steps.check-build-and-push.outputs.enable == 'true'
+        uses: docker/metadata-action@v5
+        with:
+          images: |
+            ${{ secrets.DOCKERHUB_IMAGE_PREFIX }}server-core
+          tags: |
+            type=schedule
+            type=ref,event=branch
+            type=ref,event=tag
+            type=raw,value=latest,enable={{is_default_branch}}
+      - name: Login to DockerHub
+        if: steps.check-build-and-push.outputs.enable == 'true' && steps.dockerhub.outputs.dockerhub-publish == '1'
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push to DockerHub
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.dockerhub.outputs.dockerhub-publish == '1'
        uses: docker/build-push-action@v6
@@ -243,6 +256,8 @@ jobs:
          provenance: false
          labels: ${{ steps.dockerhub-tag.outputs.labels }}
          tags: ${{ steps.dockerhub-tag.outputs.tags }}
+
+      # Trivy scanning
      - name: Get image for Trivy scanning
        id: trivy-image
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
@@ -286,6 +301,30 @@ jobs:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
+
+      - name: Use Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version-file: ".node-version"
+      - name: restore node_modules
+        uses: actions/cache@v4
+        with:
+          path: |
+            packages/node_modules
+          key: ${{ runner.os }}-${{ hashFiles('packages/yarn.lock') }}
+      - name: Build
+        run: |
+          corepack enable
+
+          cd packages
+          yarn install
+          yarn build:single ${{ matrix.gateway-name }}/tsconfig.build.json
+          yarn run pinst --disable
+          yarn workspaces focus ${{ matrix.gateway-name }} --production
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # Check how the image should be built and pushed
      - name: Determine if images should be published to DockerHub
        id: dockerhub
        run: |
@@ -314,6 +353,18 @@ jobs:
        run: |
          echo "Enable build and push: ${{ env.SECRET_ACCESS != '' }}"
          echo "enable=${{ env.SECRET_ACCESS != '' }}" >> $GITHUB_OUTPUT
+
+      # No-push build if no destination
+      - name: Build without push
+        if: steps.check-build-and-push.outputs.enable != 'true'
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages
+          file: ./packages/${{ matrix.gateway-name }}/Dockerfile.circle
+          push: false
+          provenance: false
+
+      # GHCR build
      - name: Get the Docker tag for GHCR
        id: ghcr-tag
        if: steps.check-build-and-push.outputs.enable == 'true'
@@ -326,6 +377,25 @@ jobs:
            type=ref,event=branch
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}
+      - name: Login to GitHub Container Registry
+        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true'
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push to GHCR
+        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
+        uses: docker/build-push-action@v6
+        with:
+          context: ./packages
+          file: ./packages/${{ matrix.gateway-name }}/Dockerfile.circle
+          push: true
+          provenance: false
+          labels: ${{ steps.ghcr-tag.outputs.labels }}
+          tags: "${{ steps.ghcr-tag.outputs.tags }}"
+
+      # DockerHub push
      - name: Get the Docker tag for DockerHub
        id: dockerhub-tag
        if: steps.check-build-and-push.outputs.enable == 'true'
@@ -338,54 +408,12 @@ jobs:
            type=ref,event=branch
            type=ref,event=tag
            type=raw,value=latest,enable={{is_default_branch}}
-      - name: Use Node.js
-        uses: actions/setup-node@v6
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        with:
-          node-version-file: ".node-version"
-      - name: restore node_modules
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        uses: actions/cache@v4
-        with:
-          path: |
-            packages/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('packages/yarn.lock') }}
-      - name: Build
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        run: |
-          corepack enable
-
-          cd packages
-          yarn install
-          yarn lerna run --scope \*\*/${{ matrix.gateway-name }} --include-dependencies --stream build
-          yarn run pinst --disable
-          yarn workspaces focus ${{ matrix.gateway-name }} --production
-      - name: Set up Docker Buildx
-        if: steps.check-build-and-push.outputs.enable == 'true'
-        uses: docker/setup-buildx-action@v3
      - name: Login to DockerHub
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.dockerhub.outputs.dockerhub-publish == '1'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Login to GitHub Container Registry
-        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true'
-        uses: docker/login-action@v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Build and push to GHCR
-        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
-        uses: docker/build-push-action@v6
-        with:
-          context: ./packages
-          file: ./packages/${{ matrix.gateway-name }}/Dockerfile.circle
-          push: true
-          provenance: false
-          labels: ${{ steps.ghcr-tag.outputs.labels }}
-          tags: "${{ steps.ghcr-tag.outputs.tags }}"
      - name: Build and push to DockerHub
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.dockerhub.outputs.dockerhub-publish == '1'
        uses: docker/build-push-action@v6
@@ -396,6 +424,8 @@ jobs:
          provenance: false
          labels: ${{ steps.dockerhub-tag.outputs.labels }}
          tags: "${{ steps.dockerhub-tag.outputs.tags }}"
+
+      # Trivy scanning
      - name: Get image for Trivy scanning
        id: trivy-image
        if: steps.check-build-and-push.outputs.enable == 'true' && steps.check-ghcr.outputs.enable == 'true' && steps.ghcr-tag.outputs.tags != 0
@@ -425,7 +455,7 @@ jobs:
          echo $CODE_BLOCK >> $GITHUB_STEP_SUMMARY

  lint-packages:
-    name: Lint Package
+    name: Lint Package ${{ matrix.package-name }}
    runs-on: ubuntu-latest
    timeout-minutes: 15

@@ -443,8 +473,10 @@ jobs:
          - job-worker
          - openapi
          - live-status-gateway
-          - webui
          - live-status-gateway-api
+        include:
+          - package-name: webui
+            tsconfig-name: tsconfig.json

    steps:
      - uses: actions/checkout@v6
@@ -467,7 +499,12 @@ jobs:
          cd packages
          yarn config set cacheFolder /home/runner/${{ matrix.package-name }}-cache
          yarn install
-          yarn lerna run --scope \*\*/${{ matrix.package-name }} --include-dependencies --stream build
+
+          if [ "${{ matrix.package-name }}" = "openapi" ]; then
+            yarn workspace @sofie-automation/openapi run build
+          else
+            yarn build:single ${{ matrix.package-name }}/${{ matrix.tsconfig-name || 'tsconfig.build.json' }}
+          fi
        env:
          CI: true
      - name: Run typecheck and linter
@@ -478,7 +515,7 @@ jobs:
          CI: true

  test-packages:
-    name: Test Package (main)
+    name: Test Package ${{ matrix.package-name }} (Node ${{ matrix.node-version }})
    runs-on: ubuntu-latest
    timeout-minutes: 15

@@ -513,6 +550,7 @@ jobs:
          - node-version: 22.x
            package-name: webui
            send-coverage: true
+            tsconfig-name: tsconfig.json
          # manual meteor-lib as it only needs a couple of versions
          - node-version: 22.x
            package-name: meteor-lib
@@ -539,7 +577,12 @@ jobs:
          cd packages
          yarn config set cacheFolder /home/runner/test-packages-cache
          yarn install
-          yarn lerna run --scope \*\*/${{ matrix.package-name }} --include-dependencies --stream build
+
+          if [ "${{ matrix.package-name }}" = "openapi" ]; then
+            yarn workspace @sofie-automation/openapi run build
+          else
+            yarn build:single ${{ matrix.package-name }}/${{ matrix.tsconfig-name || 'tsconfig.build.json' }}
+          fi
        env:
          CI: true
      - name: Run tests
@@ -631,6 +674,7 @@ jobs:
        uses: actions/setup-node@v6
        with:
          node-version-file: ".node-version"
+      - uses: ./.github/actions/setup-meteor
      - name: restore node_modules
        uses: actions/cache@v4
        with:
@@ -641,9 +685,13 @@ jobs:
        run: |
          corepack enable

-          cd packages
          yarn config set cacheFolder /home/runner/publish-docs-cache
          yarn install
+
+          # setup zodern:types. No linters are set up, so this simply installs the packages
+          yarn meteor lint
+
+          cd packages
          yarn build:all
        env:
          CI: true
diff --git a/.github/workflows/publish-libs.yml b/.github/workflows/publish-libs.yml
index fc9e1eca8b..71ea216fbf 100644
--- a/.github/workflows/publish-libs.yml
+++ b/.github/workflows/publish-libs.yml
@@ -68,14 +68,14 @@ jobs:
      - name: Use Node.js
        uses: actions/setup-node@v6
        with:
-          node-version-file: '.node-version'
+          node-version-file: ".node-version"
      - name: Prepare Environment
        run: |
          corepack enable

          cd packages
          yarn install
-          yarn lerna run --scope \*\*/${{ matrix.package-name }} --include-dependencies --stream build
+          yarn build:single ${{ matrix.package-name }}/tsconfig.build.json
        env:
          CI: true
      - name: Run typecheck and linter
@@ -119,7 +119,7 @@ jobs:

          cd packages
          yarn install
-          yarn lerna run --scope \*\*/${{ matrix.package-name }} --include-dependencies --stream build
+          yarn build:single ${{ matrix.package-name }}/tsconfig.build.json
        env:
          CI: true
      - name: Run tests
@@ -145,7 +145,7 @@ jobs:
      - name: Use Node.js
        uses: actions/setup-node@v6
        with:
-          node-version-file: '.node-version'
+          node-version-file: ".node-version"
      - name: Prepare Environment
        run: |
          corepack enable
@@ -222,7 +222,7 @@ jobs:
      - name: Use Node.js
        uses: actions/setup-node@v6
        with:
-          node-version-file: '.node-version'
+          node-version-file: ".node-version"

      - name: Download release artifact
        uses: actions/download-artifact@v6
diff --git a/.github/workflows/sonar.yaml b/.github/workflows/sonar.yaml
index a7b8cb1d8c..91acb75503 100644
--- a/.github/workflows/sonar.yaml
+++ b/.github/workflows/sonar.yaml
@@ -43,6 +43,10 @@ jobs:
          yarn config set cacheFolder /home/runner/lint-core-cache
          yarn
+
+          # setup zodern:types. No linters are set up, so this simply installs the packages
+          yarn meteor lint
+          yarn build:packages
        env:
          CI: true
diff --git a/.gitignore b/.gitignore
index 6d86bbd070..d9105be1b9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,8 @@ meteor-settings.json
 .github/workflows/forkSync.yml
 .github/workflows/forkSynk.yml

+**/*.tsbuildinfo
+
 .pnp.*
 .yarn/*
 !.yarn/patches
diff --git a/meteor/package.json b/meteor/package.json
index aaa33259d1..c65b6fac0b 100644
--- a/meteor/package.json
+++ b/meteor/package.json
@@ -14,10 +14,10 @@
 		"unit": "jest",
 		"unitci": "jest --maxWorkers 2 --coverage",
 		"unitcov": "jest --coverage",
-		"test": "yarn check-types && yarn unit",
+		"test": "yarn unit",
 		"watch": "jest --watch",
 		"update-snapshots": "jest --updateSnapshot",
-		"ci:lint": "yarn check-types && yarn lint",
+		"ci:lint": "yarn lint",
 		"cov-open": "open-cli coverage/lcov-report/index.html",
 		"cov": "yarn unitcov && yarn cov-open",
 		"license-validate": "node ../scripts/checkLicenses.js --allowed=\"MIT,BSD,ISC,Apache,Unlicense,CC0,LGPL,CC BY 3.0,CC BY 4.0,MPL 2.0,Python 2.0\" --excludePackages=timecode,rxjs/ajax,rxjs/fetch,rxjs/internal-compatibility,nw-pre-gyp-module-test,rxjs/operators,rxjs/testing,rxjs/webSocket,undefined,i18next-conv,@fortawesome/fontawesome-common-types,argv,indexof,custom-license,private,public-domain-module,@sofie-automation/corelib,@sofie-automation/shared-lib,@sofie-automation/job-worker",
@@ -32,9 +32,7 @@
 		"prepareChangelog": "run release --prerelease --release-as patch",
 		"validate:all-dependencies": "run validate:prod-dependencies && run validate:dev-dependencies && run license-validate",
 		"validate:prod-dependencies": "yarn npm audit --environment production",
-		"validate:dev-dependencies": "yarn npm audit --environment development --severity moderate",
-		"check-types": "tsc --noEmit -p tsconfig.json",
-		"watch-types": "run check-types --watch"
+		"validate:dev-dependencies": "yarn npm audit --environment development --severity moderate"
 	},
 	"dependencies": {
 		"@babel/runtime": "^7.26.7",
diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts
index 948f8eefb9..d27bded17c 100644
--- a/meteor/server/api/__tests__/cleanup.test.ts
+++ b/meteor/server/api/__tests__/cleanup.test.ts
@@ -203,7 +203,7 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) {
 		startSegmentId: segmentId,
 		timelineObjectsString: '' as any,
 	}
-	const pieceId = await Pieces.mutableCollection.insertAsync(piece)
+	await Pieces.mutableCollection.insertAsync(piece)

 	await AdLibActions.mutableCollection.insertAsync({
 		_id: getRandomId(),
@@ -265,22 +265,15 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) {
 	})
 	const packageId = await ExpectedPackages.mutableCollection.insertAsync({
 		_id: getRandomId(),
-		blueprintPackageId: '',
-		// @ts-expect-error bucketId is not a part of all ExpectedPackageDBs
-		bucketId,
-		content: {} as any,
-		contentVersionHash: '',
-		created: 0,
-		fromPieceType: '' as any,
-		layers: [],
-		pieceId,
-		rundownId,
-		segmentId,
-		sideEffect: {} as any,
 		studioId,
-		sources: {} as any,
-		type: '' as any,
-		version: {} as any,
+		rundownId,
+		bucketId: null,
+		created: 0,
+		package: {} as any,
+		ingestSources: [],
+		playoutSources: {
+			pieceInstanceIds: [],
+		},
 	})
 	await ExpectedPackageWorkStatuses.insertAsync({
 		_id: getRandomId(),
diff --git a/meteor/server/api/ingest/debug.ts b/meteor/server/api/ingest/debug.ts
index 37c2947233..160d62070b 100644
--- a/meteor/server/api/ingest/debug.ts
+++ b/meteor/server/api/ingest/debug.ts
@@ -8,7 +8,6 @@ import { QueueStudioJob } from '../../worker/worker'
 import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio'
 import { RundownPlaylistId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { MeteorDebugMethods } from '../../methods'
-import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'

 MeteorDebugMethods({
 	/**
@@ -47,31 +46,4 @@ MeteorDebugMethods({
 			segmentExternalId: segment.externalId,
 		})
 	},
-	/**
-	 * Regenerate all the expected packages for all rundowns in the system.
-	 * Additionally it will recreate any expectedPlayoutItems.
-	 * This shouldn't be necessary as ingest will do this for each rundown as part of its workflow
-	 */
-	debug_recreateExpectedPackages: async () => {
-		const rundowns = (await Rundowns.findFetchAsync(
-			{},
-			{
-				projection: {
-					_id: 1,
-					studioId: 1,
-					source: 1,
-				},
-			}
-		)) as Array<Pick<DBRundown, '_id' | 'studioId' | 'source'>>
-
-		await Promise.all(
-			rundowns
-				.filter((rundown) => rundown.source.type !== 'snapshot')
-				.map(async (rundown) =>
-					runIngestOperation(rundown.studioId, IngestJobs.ExpectedPackagesRegenerate, {
-						rundownId: rundown._id,
-					})
-				)
-		)
-	},
 })
diff --git a/meteor/server/api/ingest/packageInfo.ts b/meteor/server/api/ingest/packageInfo.ts
index 24fe870235..06c19dfa59 100644
--- a/meteor/server/api/ingest/packageInfo.ts
+++ b/meteor/server/api/ingest/packageInfo.ts
@@ -1,10 +1,7 @@
 import {
-	ExpectedPackageDBFromBucketAdLib,
-	ExpectedPackageDBFromBucketAdLibAction,
-	ExpectedPackageDBFromStudioBaselineObjects,
 	ExpectedPackageDBType,
-	ExpectedPackageFromRundown,
-	ExpectedPackageFromRundownBaseline,
+	ExpectedPackageDB,
+	ExpectedPackageIngestSourceBucket,
 } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos'
 import { ExpectedPackages, Rundowns } from '../../collections'
@@ -28,8 +25,10 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P
 		return
 	}

-	if (pkg.listenToPackageInfoUpdates) {
-		switch (pkg.fromPieceType) {
+	for (const source of pkg.ingestSources) {
+		if (!source.listenToPackageInfoUpdates) continue
+
+		switch (source.fromPieceType) {
 			case ExpectedPackageDBType.PIECE:
 			case ExpectedPackageDBType.ADLIB_PIECE:
 			case ExpectedPackageDBType.ADLIB_ACTION:
@@ -41,39 +40,44 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P
 				break
 			case ExpectedPackageDBType.BUCKET_ADLIB:
 			case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
-				onUpdatedPackageInfoForBucketItemDebounce(pkg)
+				onUpdatedPackageInfoForBucketItemDebounce(pkg, source)
 				break
 			case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
 				onUpdatedPackageInfoForStudioBaselineDebounce(pkg)
 				break
 			default:
-				assertNever(pkg)
+				assertNever(source)
 				break
 		}
 	}
 }

 const pendingRundownPackageUpdates = new Map<RundownId, Array<ExpectedPackageId>>()
-function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageFromRundown | ExpectedPackageFromRundownBaseline) {
-	const existingEntry = pendingRundownPackageUpdates.get(pkg.rundownId)
+function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageDB) {
+	if (!pkg.rundownId) {
+		logger.error(`Updating ExpectedPackage "${pkg._id}" not possible: missing rundownId`)
+		return
+	}
+
+	const rundownId = pkg.rundownId
+
+	const existingEntry = pendingRundownPackageUpdates.get(rundownId)
 	if (existingEntry) {
 		// already queued, add to the batch
 		existingEntry.push(pkg._id)
 	} else {
-		pendingRundownPackageUpdates.set(pkg.rundownId, [pkg._id])
+		pendingRundownPackageUpdates.set(rundownId, [pkg._id])
 	}

 	// TODO: Scaling - this won't batch correctly if package manager directs calls to multiple instances
 	lazyIgnore(
-		`onUpdatedPackageInfoForRundown_${pkg.rundownId}`,
+		`onUpdatedPackageInfoForRundown_${rundownId}`,
 		() => {
-			const packageIds = pendingRundownPackageUpdates.get(pkg.rundownId)
+			const packageIds = pendingRundownPackageUpdates.get(rundownId)
 			if (packageIds) {
-				pendingRundownPackageUpdates.delete(pkg.rundownId)
-				onUpdatedPackageInfoForRundown(pkg.rundownId, packageIds).catch((e) => {
-					logger.error(
-						`Updating ExpectedPackages for Rundown "${pkg.rundownId}" failed: ${stringifyError(e)}`
-					)
+				pendingRundownPackageUpdates.delete(rundownId)
+				onUpdatedPackageInfoForRundown(rundownId, packageIds).catch((e) => {
+					logger.error(`Updating ExpectedPackages for Rundown "${rundownId}" failed: ${stringifyError(e)}`)
 				})
 			}
 		},
@@ -108,19 +112,24 @@ async function onUpdatedPackageInfoForRundown(
 	})
 }

-function onUpdatedPackageInfoForBucketItemDebounce(
-	pkg: ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction
-) {
+function onUpdatedPackageInfoForBucketItemDebounce(pkg: ExpectedPackageDB, source: ExpectedPackageIngestSourceBucket) {
+	if (!pkg.bucketId) {
+		logger.error(`Updating ExpectedPackage "${pkg._id}" for Bucket "${pkg.bucketId}" not possible`)
+		return
+	}
+
+	const bucketId = pkg.bucketId
+
 	lazyIgnore(
-		`onUpdatedPackageInfoForBucket_${pkg.studioId}_${pkg.bucketId}_${pkg.pieceExternalId}`,
+		`onUpdatedPackageInfoForBucket_${pkg.studioId}_${bucketId}_${source.pieceExternalId}`,
 		() => {
 			runIngestOperation(pkg.studioId, IngestJobs.BucketItemRegenerate, {
-				bucketId: pkg.bucketId,
-				externalId: pkg.pieceExternalId,
+				bucketId: bucketId,
+				externalId: source.pieceExternalId,
 			}).catch((err) => {
 				logger.error(
-					`Updating ExpectedPackages for Bucket "${pkg.bucketId}" Item "${
-						pkg.pieceExternalId
+					`Updating ExpectedPackages for Bucket "${bucketId}" Item "${
+						source.pieceExternalId
 					}" failed: ${stringifyError(err)}`
 				)
 			})
@@ -129,7 +138,7 @@ function onUpdatedPackageInfoForBucketItemDebounce(
 	)
 }

-function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDBFromStudioBaselineObjects) {
+function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDB) {
 	lazyIgnore(
 		`onUpdatedPackageInfoForStudioBaseline_${pkg.studioId}`,
 		() => {
diff --git a/meteor/server/api/integration/expectedPackages.ts b/meteor/server/api/integration/expectedPackages.ts
index 42f98da9c3..f6d99d242c 100644
--- a/meteor/server/api/integration/expectedPackages.ts
+++ b/meteor/server/api/integration/expectedPackages.ts
@@ -34,6 +34,7 @@ import {
 } from '../../collections'
 import { logger } from '../../logging'
 import _ from 'underscore'
+import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

 export namespace PackageManagerIntegration {
 	export async function updateExpectedPackageWorkStatuses(
@@ -99,9 +100,17 @@ export namespace PackageManagerIntegration {
 			const fromPackageIds = workStatus.fromPackages.map((p) => p.id)
 			if (fromPackageIds.length) {
 				ps.push(
-					ExpectedPackages.findOneAsync({
-						_id: { $in: fromPackageIds },
-					}).then((expPackage) => {
+					ExpectedPackages.findOneAsync(
+						{
+							_id: { $in: fromPackageIds },
+						},
+						{
+							projection: {
+								_id: 1,
+								studioId: 1,
+							},
+						}
+					).then((expPackage: Pick<ExpectedPackageDB, '_id' | 'studioId'> | undefined) => {
 						if (!expPackage)
 							throw new Meteor.Error(404, `ExpectedPackages "${fromPackageIds}" not found`)
diff --git a/meteor/server/api/rest/v1/__tests__/typeConversions.spec.ts b/meteor/server/api/rest/v1/__tests__/typeConversions.spec.ts
new file mode 100644
index 0000000000..e5ce8b8c9e
--- /dev/null
+++ b/meteor/server/api/rest/v1/__tests__/typeConversions.spec.ts
@@ -0,0 +1,83 @@
+import { protectString } from '@sofie-automation/corelib/dist/protectedString'
+import { buildStudioFromResolved } from '../typeConversion'
+import { wrapDefaultObject } from '@sofie-automation/corelib/dist/settings/objectWithOverrides'
+import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio'
+import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { IBlueprintConfig, StudioBlueprintManifest } from '@sofie-automation/blueprints-integration'
+import { APIStudio } from '../../../../lib/rest/v1'
+
+describe('buildStudioFromResolved', () => {
+	test('preserves existing fields and overrides API ones', async () => {
+		const blueprintManifest = {} as unknown as StudioBlueprintManifest
+		const apiStudio = {
+			name: 'New Name',
+			settings: { frameRate: 25 } as IStudioSettings,
+			config: { someValue: 1 },
+			supportedShowStyleBase: ['A'],
+		} as APIStudio
+		const existingStudio = {
+			_id: protectString('studio0'),
+			name: 'Studio 0',
+			settingsWithOverrides: wrapDefaultObject({ frameRate: 50, allowHold: true } as IStudioSettings),
+			blueprintConfigWithOverrides: wrapDefaultObject({ B: 0 } as IBlueprintConfig),
+		} as DBStudio
+		const studio = await buildStudioFromResolved({
+			apiStudio,
+			existingStudio,
+			blueprintManifest,
+			blueprintId: protectString('bp1'),
+			studioId: protectString('studio0'),
+		})
+
+		expect(studio._id).toBe('studio0')
+		expect(studio.name).toBe('New Name')
+		expect(studio.blueprintId).toBe('bp1')
+		expect(studio.settingsWithOverrides.overrides).toContainEqual({
+			op: 'set',
+			path: 'frameRate',
+			value: 25,
+		})
+		expect(studio.blueprintConfigWithOverrides.overrides).toContainEqual({
+			op: 'set',
+			path: 'someValue',
+			value: 1,
+		})
+	})
+	test('preserves existing fields and overrides API ones with blueprintConfigFromAPI defined', async () => {
+		const blueprintManifest = { blueprintConfigFromAPI: async () => ({ fromBlueprints: true }) } as any
+		const apiStudio = {
+			name: 'New Name',
+			settings: { frameRate: 25 } as IStudioSettings,
+			config: { someValue: 1 },
+			supportedShowStyleBase: ['A'],
+			blueprintConfigPresetId: 'preset0',
+		} as APIStudio
+		const existingStudio = {
+			_id: protectString('studio0'),
+			name: 'Studio 0',
+			settingsWithOverrides: wrapDefaultObject({ frameRate: 50 } as IStudioSettings),
+			blueprintConfigWithOverrides: wrapDefaultObject({ B: 0 } as IBlueprintConfig),
+		} as DBStudio
+		const studio = await buildStudioFromResolved({
+			apiStudio,
+			existingStudio,
+			blueprintManifest,
+			blueprintId: protectString('bp1'),
+			studioId: protectString('studio0'),
+		})
+
+		expect(studio._id).toBe('studio0')
+		expect(studio.name).toBe('New Name')
+		expect(studio.blueprintId).toBe('bp1')
+		expect(studio.settingsWithOverrides.overrides).toContainEqual({
+			op: 'set',
+			path: 'frameRate',
+			value: 25,
+		})
+		expect(studio.blueprintConfigWithOverrides.overrides).toContainEqual({
+			op: 'set',
+			path: 'fromBlueprints',
+			value: true,
+		})
+	})
+})
diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts
index 59a068398d..9742c5815b 100644
--- a/meteor/server/api/rest/v1/typeConversion.ts
+++ b/meteor/server/api/rest/v1/typeConversion.ts
@@ -309,19 +309,42 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P
 	}
 	if (!blueprint) return undefined

-	let studio: DBStudio | undefined
-	if (existingId) studio = await Studios.findOneAsync(existingId)
+	let existingStudio: DBStudio | undefined
+	if (existingId) existingStudio = await Studios.findOneAsync(existingId)

 	const blueprintManifest = evalBlueprint(blueprint) as StudioBlueprintManifest
+
+	return buildStudioFromResolved({
+		apiStudio,
+		existingStudio,
+		blueprintManifest,
+		blueprintId: blueprint._id,
+		studioId: existingId ?? getRandomId(),
+	})
+}
+
+export async function buildStudioFromResolved({
+	apiStudio,
+	existingStudio,
+	blueprintManifest,
+	blueprintId,
+	studioId,
+}: {
+	apiStudio: APIStudio
+	existingStudio?: DBStudio
+	blueprintManifest: StudioBlueprintManifest
+	blueprintId: BlueprintId
+	studioId: StudioId
+}): Promise<DBStudio> {
 	let blueprintConfig: ObjectWithOverrides<IBlueprintConfig>
 	if (typeof blueprintManifest.blueprintConfigFromAPI !== 'function') {
-		blueprintConfig = studio
-			? updateOverrides(studio.blueprintConfigWithOverrides, apiStudio.config as IBlueprintConfig)
+		blueprintConfig = existingStudio
+			? updateOverrides(existingStudio.blueprintConfigWithOverrides, apiStudio.config as IBlueprintConfig)
 			: wrapDefaultObject({})
 	} else {
-		blueprintConfig = studio
+		blueprintConfig = existingStudio
 			? updateOverrides(
-					studio.blueprintConfigWithOverrides,
+					existingStudio.blueprintConfigWithOverrides,
 					await StudioBlueprintConfigFromAPI(apiStudio, blueprintManifest)
 			  )
 			: convertObjectIntoOverrides(await StudioBlueprintConfigFromAPI(apiStudio, blueprintManifest))
@@ -330,24 +353,15 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P
 	const studioSettings = studioSettingsFrom(apiStudio.settings)

 	return {
-		_id: existingId ?? getRandomId(),
-		name: apiStudio.name,
-		blueprintId: blueprint?._id,
-		blueprintConfigPresetId: apiStudio.blueprintConfigPresetId,
-		blueprintConfigWithOverrides: blueprintConfig,
-		settingsWithOverrides: studio
-			? updateOverrides(studio.settingsWithOverrides, studioSettings)
-			: wrapDefaultObject(studioSettings),
-		supportedShowStyleBase: apiStudio.supportedShowStyleBase?.map((id) => protectString(id)) ?? [],
-		mappingsWithOverrides: studio?.mappingsWithOverrides ?? wrapDefaultObject({}),
-		mappingsHash: studio?.mappingsHash,
-		routeSetsWithOverrides: studio?.routeSetsWithOverrides ?? wrapDefaultObject({}),
-		_rundownVersionHash: studio?._rundownVersionHash ?? '',
+		// fill in the blanks if there is no existing studio
+		mappingsWithOverrides: wrapDefaultObject({}),
+		routeSetsWithOverrides: wrapDefaultObject({}),
+		_rundownVersionHash: '',
 		routeSetExclusivityGroupsWithOverrides: wrapDefaultObject({}),
 		packageContainersWithOverrides: wrapDefaultObject({}),
 		previewContainerIds: [],
 		thumbnailContainerIds: [],
-		peripheralDeviceSettings: studio?.peripheralDeviceSettings ?? {
+		peripheralDeviceSettings: {
 			deviceSettings: wrapDefaultObject({}),
 			playoutDevices: wrapDefaultObject({}),
 			ingestDevices: wrapDefaultObject({}),
@@ -355,6 +369,20 @@ export async function studioFrom(apiStudio: APIStudio, existingId?: StudioId): P
 		},
 		lastBlueprintConfig: undefined,
 		lastBlueprintFixUpHash: undefined,
+
+		// take what existing studio might have
+		...existingStudio,
+
+		// override what apiStudio can
+		_id: studioId,
+		name: apiStudio.name,
+		blueprintId,
+		blueprintConfigPresetId: apiStudio.blueprintConfigPresetId,
+		blueprintConfigWithOverrides: blueprintConfig,
+		settingsWithOverrides: existingStudio
+			? updateOverrides(existingStudio.settingsWithOverrides, studioSettings)
+			: wrapDefaultObject(studioSettings),
+		supportedShowStyleBase: apiStudio.supportedShowStyleBase?.map((id) => protectString(id)) ?? [],
 	}
 }
diff --git a/meteor/server/collections/packages-media.ts b/meteor/server/collections/packages-media.ts
index f004da7555..2685562924 100644
--- a/meteor/server/collections/packages-media.ts
+++ b/meteor/server/collections/packages-media.ts
@@ -14,15 +14,17 @@ export const ExpectedPackages = createAsyncOnlyReadOnlyMongoCollection(
diff --git a/meteor/server/migration/1_50_0.ts b/meteor/server/migration/1_50_0.ts
index 08a6be47d8..cb194d8c5e 100644
--- a/meteor/server/migration/1_50_0.ts
+++ b/meteor/server/migration/1_50_0.ts
@@ -33,7 +33,6 @@ import { JSONBlobStringify, JSONSchema, TSR } from '@sofie-automation/blueprints
 import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants'
 import { PartId } from '@sofie-automation/shared-lib/dist/core/model/Ids'
 import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedString'
-import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import {
 	AdLibActionId,
 	BucketAdLibActionId,
@@ -44,6 +43,7 @@ import {
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
+import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52'

 // Release 50
@@ -161,9 +161,9 @@ const oldDeviceTypeToNewMapping = {
 }

 const EXPECTED_PACKAGE_TYPES_ADDED_PART_ID = [
-	ExpectedPackageDBType.PIECE,
-	ExpectedPackageDBType.ADLIB_PIECE,
-	ExpectedPackageDBType.ADLIB_ACTION,
+	PackagesPreR53.ExpectedPackageDBType.PIECE,
+	PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE,
+	PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION,
 ]

 export const addSteps = addMigrationSteps('1.50.0', [
@@ -875,10 +875,10 @@ export const addSteps = addMigrationSteps('1.50.0', [
 			return false
 		},
 		migrate: async () => {
-			const objects = await ExpectedPackages.findFetchAsync({
+			const objects = (await ExpectedPackages.findFetchAsync({
 				fromPieceType: { $in: EXPECTED_PACKAGE_TYPES_ADDED_PART_ID as any }, // Force the types, as the query does not match due to the interfaces
 				partId: { $exists: false },
-			})
+			})) as unknown as Array<PackagesPreR53.ExpectedPackageDB>

 			const neededPieceIds: Array<
 				PieceId | AdLibActionId | RundownBaselineAdLibActionId | BucketAdLibId | BucketAdLibActionId
diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts
index 7c7cef98e2..30a74d769e 100644
--- a/meteor/server/migration/X_X_X.ts
+++ b/meteor/server/migration/X_X_X.ts
@@ -2,6 +2,14 @@ import { addMigrationSteps } from './databaseMigration'
 import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion'
 import { MongoInternals } from 'meteor/mongo'
 import { Studios } from '../collections'
+import { ExpectedPackages } from '../collections'
+import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52'
+import {
+	ExpectedPackageDB,
+	ExpectedPackageIngestSource,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { BucketId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { assertNever, Complete } from '@sofie-automation/corelib/dist/lib'

 /*
  * **************************************************************************************
@@ -59,4 +67,132 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [
 			// Do nothing, the user will have to resolve this manually
 		},
 	},
+	{
+		id: `convert ExpectedPackages to new format`,
+		canBeRunAutomatically: true,
+		validate: async () => {
+			const packages = await ExpectedPackages.findFetchAsync({
+				fromPieceType: { $exists: true },
+			})
+
+			if (packages.length > 0) {
+				return 'ExpectedPackages must be converted to new format'
+			}
+
+			return false
+		},
+		migrate: async () => {
+			const packages = (await ExpectedPackages.findFetchAsync({
+				fromPieceType: { $exists: true },
+			})) as unknown as PackagesPreR53.ExpectedPackageDB[]
+
+			for (const pkg of packages) {
+				let rundownId: RundownId | null = null
+				let bucketId: BucketId | null = null
+				let ingestSource: ExpectedPackageIngestSource | undefined
+
+				switch (pkg.fromPieceType) {
+					case PackagesPreR53.ExpectedPackageDBType.PIECE:
+					case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							partId: pkg.partId,
+							segmentId: pkg.segmentId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							partId: pkg.partId,
+							segmentId: pkg.segmentId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB:
+						bucketId = pkg.bucketId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							pieceExternalId: pkg.pieceExternalId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+						bucketId = pkg.bucketId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							pieceExternalId: pkg.pieceExternalId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					default:
+						assertNever(pkg)
+						break
+				}
+
+				await ExpectedPackages.mutableCollection.removeAsync(pkg._id)
+
+				if (ingestSource) {
+					await ExpectedPackages.mutableCollection.insertAsync({
+						_id: pkg._id, // Preserve the old id to ensure references aren't broken. This will be 'corrected' upon first ingest operation
+						studioId: pkg.studioId,
+						rundownId: rundownId,
+						bucketId: bucketId,
+						package: {
+							...(pkg as any), // Some fields should be pruned off this, but this is fine
+							_id: pkg.blueprintPackageId,
+						},
+						created: pkg.created,
+						ingestSources: [ingestSource],
+						playoutSources: {
+							pieceInstanceIds: [],
+						},
+					} satisfies Complete<ExpectedPackageDB>)
+				}
+			}
+		},
+	},
 ])
diff --git a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
index b26af4113d..4d5a6533ed 100644
--- a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
@@ -3,7 +3,7 @@ import { literal } from '@sofie-automation/corelib/dist/lib'
 import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo'
 import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
-import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
+import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'

 export type RundownPlaylistCompact = Pick<
 	DBRundownPlaylist,
@@ -16,27 +16,34 @@ export const rundownPlaylistFieldSpecifier = literal<
-export type PieceInstanceCompact = Pick<PieceInstance, '_id' | 'rundownId'> & {
-	piece: Pick<PieceInstancePiece, 'expectedPackages'>
-}
+export type PieceInstanceCompact = Pick<
+	PieceInstance,
+	'_id' | 'rundownId' | 'partInstanceId' | 'neededExpectedPackageIds'
+>

 export const pieceInstanceFieldsSpecifier = literal<MongoFieldSpecifierOnesStrict<PieceInstanceCompact>>({
 	_id: 1,
 	rundownId: 1,
-	piece: {
-		expectedPackages: 1,
-	},
+	partInstanceId: 1,
+	neededExpectedPackageIds: 1,
+})
+
+export type ExpectedPackageDBCompact = Pick<ExpectedPackageDB, '_id' | 'package'>
+
+export const expectedPackageDBFieldsSpecifier = literal<MongoFieldSpecifierOnesStrict<ExpectedPackageDBCompact>>({
+	_id: 1,
+	package: 1,
 })

 export interface ExpectedPackagesContentCache {
-	ExpectedPackages: ReactiveCacheCollection<ExpectedPackageDB>
+	ExpectedPackages: ReactiveCacheCollection<ExpectedPackageDBCompact>
 	RundownPlaylists: ReactiveCacheCollection<RundownPlaylistCompact>
 	PieceInstances: ReactiveCacheCollection<PieceInstanceCompact>
 }

 export function createReactiveContentCache(): ExpectedPackagesContentCache {
 	const cache: ExpectedPackagesContentCache = {
-		ExpectedPackages: new ReactiveCacheCollection<ExpectedPackageDB>('expectedPackages'),
+		ExpectedPackages: new ReactiveCacheCollection<ExpectedPackageDBCompact>('expectedPackages'),
 		RundownPlaylists: new ReactiveCacheCollection<RundownPlaylistCompact>('rundownPlaylists'),
 		PieceInstances: new ReactiveCacheCollection<PieceInstanceCompact>('pieceInstances'),
 	}
diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts b/meteor/server/publications/packageManager/expectedPackages/generate.ts
index ab39291755..5c815af910 100644
--- a/meteor/server/publications/packageManager/expectedPackages/generate.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts
@@ -1,6 +1,10 @@
-import { PackageContainerOnPackage, Accessor, AccessorOnPackage } from '@sofie-automation/blueprints-integration'
-import { getContentVersionHash, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { PeripheralDeviceId, ExpectedPackageId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	PackageContainerOnPackage,
+	Accessor,
+	AccessorOnPackage,
+	ExpectedPackage,
+} from '@sofie-automation/blueprints-integration'
+import { PeripheralDeviceId, ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import {
 	PackageManagerExpectedPackage,
@@ -15,7 +19,7 @@ import { DBStudio, StudioLight, StudioPackageContainer } from '@sofie-automation
 import { clone, omit } from '@sofie-automation/corelib/dist/lib'
 import { CustomPublishCollection } from '../../../lib/customPublication'
 import { logger } from '../../../logging'
-import { ExpectedPackagesContentCache } from './contentCache'
+import { ExpectedPackageDBCompact, ExpectedPackagesContentCache } from './contentCache'
 import type { StudioFields } from './publication'

 /**
@@ -48,7 +52,7 @@ export async function updateCollectionForExpectedPackageIds(
 		// Map the expectedPackages onto their specified layer:
 		const allDeviceIds = new Set()
-		for (const layerName of packageDoc.layers) {
+		for (const layerName of packageDoc.package.layers) {
 			const layerDeviceIds = layerNameToDeviceIds.get(layerName)
 			for (const deviceId of layerDeviceIds || []) {
 				allDeviceIds.add(deviceId)
@@ -59,17 +63,7 @@ export async function updateCollectionForExpectedPackageIds(
 			// Filter, keep only the routed mappings for this device:
 			if (filterPlayoutDeviceIds && !filterPlayoutDeviceIds.includes(deviceId)) continue

-			const routedPackage = generateExpectedPackageForDevice(
-				studio,
-				{
-					...packageDoc,
-					_id: unprotectString(packageDoc._id),
-				},
-				deviceId,
-				null,
-				Priorities.OTHER, // low priority
-				packageContainers
-			)
+			const routedPackage = generateExpectedPackageForDevice(studio, packageDoc, deviceId, packageContainers)

 			updatedDocIds.add(routedPackage._id)
 			collection.replace(routedPackage)
@@ -78,118 +72,27 @@ export async function updateCollectionForExpectedPackageIds(

 	// Remove all documents for an ExpectedPackage that was regenerated, and no update was issues
 	collection.remove((doc) => {
-		if (doc.pieceInstanceId) return false
-
-		if (missingExpectedPackageIds.has(protectString(doc.expectedPackage._id))) return true
-
-		if (updatedDocIds.has(doc._id) && !regenerateIds.has(protectString(doc.expectedPackage._id))) return true
-
-		return false
-	})
-}
-
-/**
- * Regenerate the output for the provided PieceInstance `regenerateIds`, updating the data in `collection` as needed
- * @param contentCache Cache of the database documents used
- * @param studio Minimal studio document
- * @param layerNameToDeviceIds Lookup table of package layers, to PeripheralDeviceIds the layer could be used with
- * @param collection Output collection of the publication
- * @param filterPlayoutDeviceIds PeripheralDeviceId filter applied to this publication
- * @param regenerateIds Ids of PieceInstance documents to be recalculated
- */
-export async function updateCollectionForPieceInstanceIds(
-	contentCache: ReadonlyDeep<ExpectedPackagesContentCache>,
-	studio: Pick<DBStudio, StudioFields>,
-	layerNameToDeviceIds: Map<string, PeripheralDeviceId[]>,
-	packageContainers: Record<string, StudioPackageContainer>,
-	collection: CustomPublishCollection<PackageManagerExpectedPackage>,
-	filterPlayoutDeviceIds: ReadonlyDeep<PeripheralDeviceId[]> | undefined,
-	regenerateIds: Set<PieceInstanceId>
-): Promise<void> {
-	const updatedDocIds = new Set()
-	const missingPieceInstanceIds = new Set()
-
-	for (const pieceInstanceId of regenerateIds) {
-		const pieceInstanceDoc = contentCache.PieceInstances.findOne(pieceInstanceId)
-		if (!pieceInstanceDoc) {
-			missingPieceInstanceIds.add(pieceInstanceId)
-			continue
-		}
-		if (!pieceInstanceDoc.piece?.expectedPackages) continue
-
-		pieceInstanceDoc.piece.expectedPackages.forEach((expectedPackage, i) => {
-			const sanitisedPackageId = getExpectedPackageId(pieceInstanceId, expectedPackage._id || '__unnamed' + i)
-
-			// Map the expectedPackages onto their specified layer:
-			const allDeviceIds = new Set()
-			for (const layerName of expectedPackage.layers) {
-				const layerDeviceIds = layerNameToDeviceIds.get(layerName)
-				for (const deviceId of layerDeviceIds || []) {
-					allDeviceIds.add(deviceId)
-				}
-			}
-
-			for (const deviceId of allDeviceIds) {
-				// Filter, keep only the routed mappings for this device:
-				if (filterPlayoutDeviceIds && !filterPlayoutDeviceIds.includes(deviceId)) continue
-
-				const routedPackage = generateExpectedPackageForDevice(
-					studio,
-					{
-						...expectedPackage,
-						_id: unprotectString(sanitisedPackageId),
-						rundownId: pieceInstanceDoc.rundownId,
-						contentVersionHash: getContentVersionHash(expectedPackage),
-					},
-					deviceId,
-					pieceInstanceId,
-					Priorities.OTHER, // low priority
-					packageContainers
-				)
-
-				updatedDocIds.add(routedPackage._id)
-				collection.replace(routedPackage)
-			}
-		})
-	}
-
-	// Remove all documents for an ExpectedPackage that was regenerated, and no update was issues
-	collection.remove((doc) => {
-		if (!doc.pieceInstanceId) return false
-
-		if (missingPieceInstanceIds.has(doc.pieceInstanceId)) return true
+		if (missingExpectedPackageIds.has(doc.expectedPackage._id)) return true

-		if (updatedDocIds.has(doc._id) && !regenerateIds.has(doc.pieceInstanceId)) return true
+		if (updatedDocIds.has(doc._id) && !regenerateIds.has(doc.expectedPackage._id)) return true

 		return false
 	})
 }

-enum Priorities {
-	// Lower priorities are done first
-
-	/** Highest priority */
-	PLAYOUT_CURRENT = 0,
-	/** Second-to-highest priority */
-	PLAYOUT_NEXT = 1,
-	OTHER = 9,
-}
-
 function generateExpectedPackageForDevice(
 	studio: Pick<
 		StudioLight,
 		'_id' | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds'
 	>,
-	expectedPackage: PackageManagerExpectedPackageBase,
+	expectedPackage: ExpectedPackageDBCompact,
 	deviceId: PeripheralDeviceId,
-	pieceInstanceId: PieceInstanceId | null,
-	priority: Priorities,
 	packageContainers: Record<string, StudioPackageContainer>
 ): PackageManagerExpectedPackage {
 	// Lookup Package sources:
 	const combinedSources: PackageContainerOnPackage[] = []

-	for (const packageSource of expectedPackage.sources) {
+	for (const packageSource of expectedPackage.package.sources) {
 		const lookedUpSource = packageContainers[packageSource.containerId]
 		if (lookedUpSource) {
 			combinedSources.push(calculateCombinedSource(packageSource, lookedUpSource))
@@ -207,24 +110,27 @@ function generateExpectedPackageForDevice(
 	}

 	// Lookup Package targets:
-	const combinedTargets = calculateCombinedTargets(expectedPackage, deviceId, packageContainers)
+	const combinedTargets = calculateCombinedTargets(expectedPackage.package, deviceId, packageContainers)

-	if (!combinedSources.length && expectedPackage.sources.length !== 0) {
+	if (!combinedSources.length && expectedPackage.package.sources.length !== 0) {
 		logger.warn(`Pub.expectedPackagesForDevice: No sources found for "${expectedPackage._id}"`)
 	}
 	if (!combinedTargets.length) {
 		logger.warn(`Pub.expectedPackagesForDevice: No targets found for "${expectedPackage._id}"`)
 	}

-	expectedPackage.sideEffect = getSideEffect(expectedPackage, studio)
+	const packageSideEffect = getSideEffect(expectedPackage.package, studio)

 	return {
-		_id: protectString(`${expectedPackage._id}_${deviceId}_${pieceInstanceId}`),
-		expectedPackage: expectedPackage,
+		_id: protectString(`${expectedPackage._id}_${deviceId}`),
+		expectedPackage: {
+			...expectedPackage.package,
+			_id: expectedPackage._id,
+			sideEffect: packageSideEffect,
+		},
 		sources: combinedSources,
 		targets: combinedTargets,
-		priority: priority,
+		priority: null,
 		playoutDeviceId: deviceId,
-		pieceInstanceId,
 	}
 }

@@ -247,7 +153,7 @@ function calculateCombinedSource(
 	for (const accessorId of accessorIds) {
 		const sourceAccessor: Accessor.Any | undefined = lookedUpSource.container.accessors[accessorId]

-		const packageAccessor: AccessorOnPackage.Any | undefined = packageSource.accessors?.[accessorId]
+		const packageAccessor: ReadonlyDeep<AccessorOnPackage.Any> | undefined = packageSource.accessors?.[accessorId]

 		if (packageAccessor && sourceAccessor && packageAccessor.type === sourceAccessor.type) {
 			combinedSource.accessors[accessorId] = deepExtend({}, sourceAccessor, packageAccessor)
@@ -261,7 +167,7 @@ function calculateCombinedSource(
 	return combinedSource
 }
 function calculateCombinedTargets(
-	expectedPackage: PackageManagerExpectedPackageBase,
+	expectedPackage: ReadonlyDeep<ExpectedPackage.Any>,
 	deviceId: PeripheralDeviceId,
 	packageContainers: Record<string, StudioPackageContainer>
 ): PackageContainerOnPackage[] {
diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts
index 5791b7ca5b..46328b5ce8 100644
--- a/meteor/server/publications/packageManager/expectedPackages/publication.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts
@@ -24,7 +24,7 @@ import { PackageManagerExpectedPackage } from '@sofie-automation/shared-lib/dist
 import { ExpectedPackagesContentObserver } from './contentObserver'
 import { createReactiveContentCache, ExpectedPackagesContentCache } from './contentCache'
 import { buildMappingsToDeviceIdMap } from './util'
-import { updateCollectionForExpectedPackageIds, updateCollectionForPieceInstanceIds } from './generate'
+import { updateCollectionForExpectedPackageIds } from './generate'
 import {
 	PeripheralDevicePubSub,
 	PeripheralDevicePubSubCollectionsNames,
@@ -162,16 +162,13 @@ async function manipulateExpectedPackagesPublicationData(
 	}

 	let regenerateExpectedPackageIds: Set<ExpectedPackageId>
-	let regeneratePieceInstanceIds: Set<PieceInstanceId>
 	if (invalidateAllItems) {
-		// force every piece to be regenerated
+		// force every package to be regenerated
 		collection.remove(null)
 		regenerateExpectedPackageIds = new Set(state.contentCache.ExpectedPackages.find({}).map((p) => p._id))
-		regeneratePieceInstanceIds = new Set(state.contentCache.PieceInstances.find({}).map((p) => p._id))
 	} else {
 		// only regenerate the reported changes
 		regenerateExpectedPackageIds = new Set(updateProps.invalidateExpectedPackageIds)
-		regeneratePieceInstanceIds = new Set(updateProps.invalidatePieceInstanceIds)
 	}

 	await updateCollectionForExpectedPackageIds(
@@ -183,15 +180,53 @@ async function manipulateExpectedPackagesPublicationData(
 		args.filterPlayoutDeviceIds,
 		regenerateExpectedPackageIds
 	)
-	await updateCollectionForPieceInstanceIds(
-		state.contentCache,
-		state.studio,
-		state.layerNameToDeviceIds,
-		state.packageContainers,
-		collection,
-		args.filterPlayoutDeviceIds,
-		regeneratePieceInstanceIds
-	)
+
+	// Ensure the priorities are correct for the packages
+	// We can do this as a post-step, as it means we can generate the packages solely based on the content
+	// If one gets regenerated, its priority will be reset to OTHER. But as it has already changed, this fixup is 'free'
+	// For those not regenerated, we can set the priority to the correct value if it has changed, without any deeper checks
+	updatePackagePriorities(state.contentCache, collection)
+}
+
+const PACKAGE_PRIORITY_PLAYOUT_CURRENT = 0
+const PACKAGE_PRIORITY_PLAYOUT_NEXT = 1
+const PACKAGE_PRIORITY_OTHER = 9
+
+function updatePackagePriorities(
+	contentCache: ReadonlyDeep<ExpectedPackagesContentCache>,
+	collection: CustomPublishCollection<PackageManagerExpectedPackage>
+) {
+	const packagePriorities = new Map<ExpectedPackageId, number>()
+
+	// Compile the map of the expected priority of each package
+	const knownPieceInstances = contentCache.PieceInstances.find({})
+	const playlist = contentCache.RundownPlaylists.findOne({})
+	const currentPartInstanceId = playlist?.currentPartInfo?.partInstanceId
+	for (const pieceInstance of knownPieceInstances) {
+		const packageIds = pieceInstance.neededExpectedPackageIds
+		if (!packageIds) continue
+
+		const packagePriority =
+			pieceInstance.partInstanceId === currentPartInstanceId
+				? PACKAGE_PRIORITY_PLAYOUT_CURRENT
+				: PACKAGE_PRIORITY_PLAYOUT_NEXT
+
+		for (const packageId of packageIds) {
+			const existingPriority = packagePriorities.get(packageId) ?? PACKAGE_PRIORITY_OTHER
+			packagePriorities.set(packageId, Math.min(existingPriority, packagePriority))
+		}
+	}
+
+	// Iterate through and update each package
+	collection.updateAll((pkg) => {
+		const expectedPriority = packagePriorities.get(pkg.expectedPackage._id) ?? PACKAGE_PRIORITY_OTHER
+		if (pkg.priority === expectedPriority) return false
+
+		return {
+			...pkg,
+			priority: expectedPriority,
+		}
+	})
+}

 meteorCustomPublish(
diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
index 1fd4f25426..f6a8069a8e 100644
--- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
+++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
@@ -37,6 +37,7 @@ import { MediaObjects } from '../../../collections'
 import { PieceDependencies } from '../common'
 import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants'
 import { PieceContentStatusMessageFactory } from '../messageFactory'
+import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'

 const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects)

@@ -450,9 +451,17 @@ describe('lib/mediaObjects', () => {
 			timelineObjectsString: EmptyPieceTimelineObjectsBlob,
 		})

+		const mockOwnerId = protectString<RundownId>('rundown0')
+
 		const messageFactory = new PieceContentStatusMessageFactory(undefined)

-		const status1 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece1, sourcelayer1)
+		const status1 = await checkPieceContentStatusAndDependencies(
+			mockStudio,
+			mockOwnerId,
+			messageFactory,
+			piece1,
+			sourcelayer1
+		)
 		expect(status1[0].status).toEqual(PieceStatusCode.OK)
 		expect(status1[0].messages).toHaveLength(0)
 		expect(status1[1]).toMatchObject(
@@ -463,7 +472,13 @@ describe('lib/mediaObjects', () => {
 			})
 		)

-		const status2 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece2, sourcelayer1)
+		const status2 = await checkPieceContentStatusAndDependencies(
+			mockStudio,
+			mockOwnerId,
+			messageFactory,
+			piece2,
+			sourcelayer1
+		)
 		expect(status2[0].status).toEqual(PieceStatusCode.SOURCE_BROKEN)
 		expect(status2[0].messages).toHaveLength(1)
 		expect(status2[0].messages[0]).toMatchObject({
@@ -477,7 +492,13 @@ describe('lib/mediaObjects', () => { }) ) - const status3 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece3, sourcelayer1) + const status3 = await checkPieceContentStatusAndDependencies( + mockStudio, + mockOwnerId, + messageFactory, + piece3, + sourcelayer1 + ) expect(status3[0].status).toEqual(PieceStatusCode.SOURCE_MISSING) expect(status3[0].messages).toHaveLength(1) expect(status3[0].messages[0]).toMatchObject({ diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts index 3c737d3329..a050f1f5af 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts @@ -48,6 +48,7 @@ export async function regenerateForBucketAdLibIds( if (sourceLayer) { const [status, itemDependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.bucketId, messageFactories.get(actionDoc.showStyleBaseId), actionDoc, sourceLayer @@ -120,6 +121,7 @@ export async function regenerateForBucketActionIds( const [status, itemDependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.bucketId, messageFactories.get(actionDoc.showStyleBaseId), fakedPiece, sourceLayer diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 5584e24961..5aa69a23db 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -11,7 +11,14 @@ import { VTContent, } from '@sofie-automation/blueprints-integration' import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { ExpectedPackageId, PeripheralDeviceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + BucketId, + ExpectedPackageId, + PeripheralDeviceId, + PieceInstanceId, + RundownId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { getPackageContainerPackageId, PackageContainerPackageStatusDB, @@ -220,6 +227,7 @@ export interface PieceContentStatusStudio export async function checkPieceContentStatusAndDependencies( studio: PieceContentStatusStudio, + packageOwnerId: RundownId | BucketId | StudioId, messageFactory: PieceContentStatusMessageFactory | undefined, piece: PieceContentStatusPiece, sourceLayer: ISourceLayer @@ -290,6 +298,7 @@ export async function checkPieceContentStatusAndDependencies( piece, sourceLayer, studio, + packageOwnerId, getPackageInfos, getPackageContainerPackageStatus, messageFactory || DEFAULT_MESSAGE_FACTORY @@ -589,6 +598,7 @@ async function checkPieceContentExpectedPackageStatus( piece: PieceContentStatusPiece, sourceLayer: ISourceLayer, studio: PieceContentStatusStudio, + packageOwnerId: RundownId | BucketId | StudioId, getPackageInfos: (packageId: ExpectedPackageId) => Promise, getPackageContainerPackageStatus: ( packageContainerId: string, @@ -657,20 +667,8 @@ async function checkPieceContentExpectedPackageStatus( checkedPackageContainers.add(matchedPackageContainer[0]) - const expectedPackageIds = [getExpectedPackageId(piece._id, expectedPackage._id)] - if (piece.pieceInstanceId) { - // If this is a PieceInstance, try looking up the PieceInstance first - 
expectedPackageIds.unshift(getExpectedPackageId(piece.pieceInstanceId, expectedPackage._id)) - - if (piece.previousPieceInstanceId) { - // Also try the previous PieceInstance, when this is an infinite continuation in case package-manager needs to catch up - expectedPackageIds.unshift( - getExpectedPackageId(piece.previousPieceInstanceId, expectedPackage._id) - ) - } - } - const fileName = getExpectedPackageFileName(expectedPackage) ?? '' + const containerLabel = matchedPackageContainer[1].container.label // Check if any of the sources exist and are valid // Future: This might be better to do by passing packageManager a 'forcedError' property in the publication, but this direct check is simpler and enough for now @@ -690,55 +688,51 @@ async function checkPieceContentExpectedPackageStatus( continue } - let warningMessage: ContentMessageLight | null = null - let matchedExpectedPackageId: ExpectedPackageId | null = null - for (const expectedPackageId of expectedPackageIds) { - const packageOnPackageContainer = await getPackageContainerPackageStatus( - matchedPackageContainer[0], - expectedPackageId - ) - if (!packageOnPackageContainer) continue - - matchedExpectedPackageId = expectedPackageId - - if (!thumbnailUrl) { - const sideEffect = getSideEffect(expectedPackage, studio) + const candidatePackageId = getExpectedPackageId(packageOwnerId, expectedPackage) + const packageOnPackageContainer = await getPackageContainerPackageStatus( + matchedPackageContainer[0], + candidatePackageId + ) + if (!packageOnPackageContainer) { + // If no package matched, we must have a warning - thumbnailUrl = await getAssetUrlFromPackageContainerStatus( - studio.packageContainers, - getPackageContainerPackageStatus, - expectedPackageId, - sideEffect.thumbnailContainerId, - sideEffect.thumbnailPackageSettings?.path - ) - } + pushOrMergeMessage({ + ...getPackageSourceMissingWarning(), + fileName: fileName, + packageContainers: [containerLabel], + }) - if (!previewUrl) { - const sideEffect = getSideEffect(expectedPackage, studio) + continue + } - previewUrl = await getAssetUrlFromPackageContainerStatus( - studio.packageContainers, - getPackageContainerPackageStatus, - expectedPackageId, - sideEffect.previewContainerId, - sideEffect.previewPackageSettings?.path - ) - } + if (!thumbnailUrl) { + const sideEffect = getSideEffect(expectedPackage, studio) - warningMessage = getPackageWarningMessage(packageOnPackageContainer.status) + thumbnailUrl = await getAssetUrlFromPackageContainerStatus( + studio.packageContainers, + getPackageContainerPackageStatus, + candidatePackageId, + sideEffect.thumbnailContainerId, + sideEffect.thumbnailPackageSettings?.path + ) + } - progress = getPackageProgress(packageOnPackageContainer.status) ?? undefined + if (!previewUrl) { + const sideEffect = getSideEffect(expectedPackage, studio) - previewUrl = await getAssetUrlFromPackageContainerStatus( - studio.packageContainers, - getPackageContainerPackageStatus, - expectedPackageId, - sideEffect.previewContainerId, - sideEffect.previewPackageSettings?.path - ) - } + previewUrl = await getAssetUrlFromPackageContainerStatus( + studio.packageContainers, + getPackageContainerPackageStatus, + candidatePackageId, + sideEffect.previewContainerId, + sideEffect.previewPackageSettings?.path + ) } - const containerLabel = matchedPackageContainer[1].container.label - - if (!matchedExpectedPackageId || warningMessage) { - // If no package matched, we must have a warning - warningMessage = warningMessage ?? getPackageSourceMissingWarning() + progress = getPackageProgress(packageOnPackageContainer.status) ?? undefined + const warningMessage = getPackageWarningMessage(packageOnPackageContainer.status) + if (warningMessage) { pushOrMergeMessage({ ...warningMessage, fileName: fileName, @@ -753,7 +747,7 @@ containerLabel, } // Fetch scan-info about the package: - const dbPackageInfos = await getPackageInfos(matchedExpectedPackageId) + const dbPackageInfos = await getPackageInfos(candidatePackageId) for (const packageInfo of dbPackageInfos) { if (packageInfo.type === PackageInfo.Type.SCAN) { packageInfos[expectedPackage._id].scan = packageInfo.payload
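Editor's note: this hunk is the core behavioural change in checkPieceContentStatus.ts. Previously up to three candidate ids were probed per package (previous PieceInstance, then PieceInstance, then Piece); now a single deterministic id is derived from the owning Rundown, Bucket or Studio, so one status lookup suffices and the loop-and-break bookkeeping disappears. A hedged sketch of the lookup shape, with stand-in types and a simplified id scheme (the real getExpectedPackageId derives the id from its inputs rather than concatenating them):

// Illustrative only: stand-in types and a simplified id scheme.
type PackageStatus = { status: string } | undefined

async function findPackageStatus(
	lookup: (candidateId: string) => Promise<PackageStatus>,
	ownerId: string, // RundownId | BucketId | StudioId in the real code
	packageId: string
): Promise<PackageStatus> {
	// Before: several owner-specific candidate ids were tried in order until one matched.
	// After: exactly one candidate id, scoped to the content owner.
	const candidateId = `${ownerId}_${packageId}` // stand-in for getExpectedPackageId(ownerId, pkg)
	return lookup(candidateId)
}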
diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts index 2aae8333f6..8b01eb8cc1 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts @@ -46,6 +46,7 @@ async function regenerateGenericPiece( if (part && segment && sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + part.rundownId, messageFactory, pieceDoc, sourceLayer @@ -182,6 +183,7 @@ export async function regenerateForPieceInstanceIds( if (partInstance && segment && sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + pieceDoc.rundownId, messageFactories.get(pieceDoc.rundownId), { ...pieceDoc.piece, @@ -381,6 +383,7 @@ export async function regenerateForBaselineAdLibPieceIds( if (sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + pieceDoc.rundownId, messageFactories.get(pieceDoc.rundownId), pieceDoc, sourceLayer @@ -461,6 +464,7 @@ export async function regenerateForBaselineAdLibActionIds( if (sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.rundownId, messageFactories.get(actionDoc.rundownId), fakedPiece, sourceLayer diff --git a/meteor/server/publications/segmentPartNotesUI/__tests__/generateNotesForSegment.test.ts b/meteor/server/publications/segmentPartNotesUI/__tests__/generateNotesForSegment.test.ts index 3debf64119..fea13201aa 100644 --- a/meteor/server/publications/segmentPartNotesUI/__tests__/generateNotesForSegment.test.ts +++ b/meteor/server/publications/segmentPartNotesUI/__tests__/generateNotesForSegment.test.ts @@ -486,4 +486,121 @@ describe('generateNotesForSegment', () => { ]) ) }) + + test('partInstance with runtime invalidReason', async () => { + const playlistId = protectString('playlist0') + const nrcsName = 'some nrcs' + + const segment: Pick = { + _id: protectString('segment0'), + _rank: 1, + rundownId: protectString('rundown0'), + name: 'A segment', + notes: [], + orphaned: undefined, + } + + const partInstance0: Pick = { + _id: protectString('instance0'), + segmentId: segment._id, + rundownId: segment.rundownId, + orphaned: undefined, + reset: false, + invalidReason: { + message: generateTranslation('Runtime error occurred'), + severity: NoteSeverity.ERROR, + }, + part: { + _id: protectString('part0'), + title: 'Test Part', + } as any, + } + + const notes = generateNotesForSegment(playlistId, segment, nrcsName, [], [partInstance0]) + expect(notes).toEqual( + literal([ + { + _id: protectString('segment0_partinstance_instance0_invalid_runtime'), + note: { + type: NoteSeverity.ERROR, + message: partInstance0.invalidReason!.message, + rank: segment._rank, + origin: { segmentId:
segment._id, + rundownId: segment.rundownId, + name: partInstance0.part.title, + partId: partInstance0.part._id, + segmentName: segment.name, + }, + }, + playlistId: playlistId, + rundownId: segment.rundownId, + segmentId: segment._id, + }, + ]) + ) + }) + + test('partInstance with runtime invalidReason but reset - no note', async () => { + const playlistId = protectString('playlist0') + const nrcsName = 'some nrcs' + + const segment: Pick = { + _id: protectString('segment0'), + _rank: 1, + rundownId: protectString('rundown0'), + name: 'A segment', + notes: [], + orphaned: undefined, + } + + const partInstance0: Pick = { + _id: protectString('instance0'), + segmentId: segment._id, + rundownId: segment.rundownId, + orphaned: undefined, + reset: true, + invalidReason: { + message: generateTranslation('Runtime error occurred'), + severity: NoteSeverity.ERROR, + }, + part: { + _id: protectString('part0'), + title: 'Test Part', + } as any, + } + + const notes = generateNotesForSegment(playlistId, segment, nrcsName, [], [partInstance0]) + expect(notes).toHaveLength(0) + }) + + test('partInstance without invalidReason - no note', async () => { + const playlistId = protectString('playlist0') + const nrcsName = 'some nrcs' + + const segment: Pick = { + _id: protectString('segment0'), + _rank: 1, + rundownId: protectString('rundown0'), + name: 'A segment', + notes: [], + orphaned: undefined, + } + + const partInstance0: Pick = { + _id: protectString('instance0'), + segmentId: segment._id, + rundownId: segment.rundownId, + orphaned: undefined, + reset: false, + invalidReason: undefined, + part: { + _id: protectString('part0'), + title: 'Test Part', + } as any, + } + + const notes = generateNotesForSegment(playlistId, segment, nrcsName, [], [partInstance0]) + expect(notes).toHaveLength(0) + }) }) diff --git a/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts b/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts index f232e38371..42e3400297 100644 --- a/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts +++ b/meteor/server/publications/segmentPartNotesUI/__tests__/publication.test.ts @@ -69,7 +69,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { Rundowns: new ReactiveCacheCollection('Rundowns'), Segments: new ReactiveCacheCollection('Segments'), Parts: new ReactiveCacheCollection('Parts'), - DeletedPartInstances: new ReactiveCacheCollection('DeletedPartInstances'), + PartInstances: new ReactiveCacheCollection('PartInstances'), } newCache.Rundowns.insert({ @@ -356,11 +356,11 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { invalid: false, invalidReason: undefined, }) - newCache.DeletedPartInstances.insert({ + newCache.PartInstances.insert({ _id: 'instance0', segmentId: segmentId0, rundownId: rundownId, - orphaned: undefined, + orphaned: 'deleted', reset: false, part: 'part' as any, }) @@ -421,6 +421,7 @@ describe('manipulateUISegmentPartNotesPublicationData', () => { [ { _id: 'instance0', + orphaned: 'deleted', part: 'part', reset: false, rundownId: 'rundown0', diff --git a/meteor/server/publications/segmentPartNotesUI/generateNotesForSegment.ts b/meteor/server/publications/segmentPartNotesUI/generateNotesForSegment.ts index 89dd9545a2..da4205a90d 100644 --- a/meteor/server/publications/segmentPartNotesUI/generateNotesForSegment.ts +++ b/meteor/server/publications/segmentPartNotesUI/generateNotesForSegment.ts @@ -155,5 +155,31 @@ export function generateNotesForSegment( } } + // Generate notes for runtime invalidReason on PartInstances + // This is distinct from planned invalidReason on Parts - these are runtime validation issues + for (const partInstance of partInstances) { + // Skip if the PartInstance has been reset (no longer relevant) or has no runtime invalidReason + if (partInstance.reset || !partInstance.invalidReason) continue + + notes.push({ + _id: protectString(`${segment._id}_partinstance_${partInstance._id}_invalid_runtime`), + playlistId, + rundownId: partInstance.rundownId, + segmentId: segment._id, + note: { + type: partInstance.invalidReason.severity ?? NoteSeverity.ERROR, + message: partInstance.invalidReason.message, + rank: segment._rank, + origin: { + segmentId: partInstance.segmentId, + partId: partInstance.part._id, + rundownId: partInstance.rundownId, + segmentName: segment.name, + name: partInstance.part.title, + }, + }, + }) + } + return notes }
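Editor's note: the loop added above is the feature in miniature: a PartInstance contributes a note only when it is live (not reset) and carries a runtime invalidReason, and the note id is kept stable by embedding the segment and instance ids. A minimal sketch of the gating predicate, with simplified stand-in types:

// Hedged sketch of the filter used above; types are simplified stand-ins.
interface PartInstanceLike {
	reset: boolean
	invalidReason?: { message: unknown; severity?: number }
}

function shouldGenerateRuntimeInvalidNote(partInstance: PartInstanceLike): boolean {
	// Reset instances are no longer relevant; instances without a runtime
	// invalidReason have nothing to report
	return !partInstance.reset && !!partInstance.invalidReason
}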
diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index 4de954886c..bbdd6c0236 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -91,7 +91,7 @@ async function setupUISegmentPartNotesPublicationObservers( triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), }), - cache.DeletedPartInstances.find({}).observe({ + cache.PartInstances.find({}).observe({ added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), changed: (doc, oldDoc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), @@ -184,13 +184,13 @@ export async function manipulateUISegmentPartNotesPublicationData( interface UpdateNotesData { rundownsCache: Map> parts: Map[]> - deletedPartInstances: Map[]> + partInstances: Map[]> } function compileUpdateNotesData(cache: ReadonlyDeep<ContentCache>): UpdateNotesData { return { rundownsCache: normalizeArrayToMap(cache.Rundowns.find({}).fetch(), '_id'), parts: groupByToMap(cache.Parts.find({}).fetch(), 'segmentId'), - deletedPartInstances: groupByToMap(cache.DeletedPartInstances.find({}).fetch(), 'segmentId'), + partInstances: groupByToMap(cache.PartInstances.find({}).fetch(), 'segmentId'), } } @@ -205,7 +205,7 @@ function updateNotesForSegment( segment, getRundownNrcsName(state.rundownsCache.get(segment.rundownId)), state.parts.get(segment._id) ?? [], - state.deletedPartInstances.get(segment._id) ?? + state.partInstances.get(segment._id) ??
[] ) // Insert generated notes diff --git a/meteor/server/publications/segmentPartNotesUI/reactiveContentCache.ts b/meteor/server/publications/segmentPartNotesUI/reactiveContentCache.ts index 5c0cb699a3..9e24227616 100644 --- a/meteor/server/publications/segmentPartNotesUI/reactiveContentCache.ts +++ b/meteor/server/publications/segmentPartNotesUI/reactiveContentCache.ts @@ -35,7 +35,7 @@ export const partFieldSpecifier = literal> >({ @@ -44,7 +44,9 @@ export const partInstanceFieldSpecifier = literal< rundownId: 1, orphaned: 1, reset: 1, + invalidReason: 1, // @ts-expect-error Deep not supported + 'part._id': 1, 'part.title': 1, }) @@ -52,7 +54,7 @@ export interface ContentCache { Rundowns: ReactiveCacheCollection> Segments: ReactiveCacheCollection> Parts: ReactiveCacheCollection> - DeletedPartInstances: ReactiveCacheCollection> + PartInstances: ReactiveCacheCollection> } export function createReactiveContentCache(): ContentCache { @@ -60,9 +62,7 @@ export function createReactiveContentCache(): ContentCache { Rundowns: new ReactiveCacheCollection>('rundowns'), Segments: new ReactiveCacheCollection>('segments'), Parts: new ReactiveCacheCollection>('parts'), - DeletedPartInstances: new ReactiveCacheCollection>( - 'deletedPartInstances' - ), + PartInstances: new ReactiveCacheCollection>('partInstances'), } return cache diff --git a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts index 8beb78d1fe..82bb509065 100644 --- a/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts +++ b/meteor/server/publications/segmentPartNotesUI/rundownContentObserver.ts @@ -58,8 +58,12 @@ export class RundownContentObserver { } ), PartInstances.observeChanges( - { rundownId: { $in: rundownIds }, reset: { $ne: true }, orphaned: 'deleted' }, - cache.DeletedPartInstances.link(), + { + rundownId: { $in: rundownIds }, + reset: { $ne: true }, + $or: [{ invalidReason: { $exists: true } }, { orphaned: 'deleted' }], + }, + cache.PartInstances.link(), { projection: partInstanceFieldSpecifier } ), ]) diff --git a/meteor/server/worker/__tests__/jobQueue.test.ts b/meteor/server/worker/__tests__/jobQueue.test.ts new file mode 100644 index 0000000000..93db5024fd --- /dev/null +++ b/meteor/server/worker/__tests__/jobQueue.test.ts @@ -0,0 +1,688 @@ +import '../../../__mocks__/_extendJest' +import { waitTime } from '../../../__mocks__/helpers/jest' +import { WorkerJobQueueManager } from '../jobQueue' + +// Mock Meteor.defer to run synchronously for testing +jest.mock('meteor/meteor', () => ({ + Meteor: { + defer: (fn: () => void) => { + // Run deferred functions immediately in tests + setTimeout(fn, 0) + }, + }, +})) + +// Mock the logging module +jest.mock('../../logging') + +// Mock getCurrentTime +const mockCurrentTime = jest.fn(() => Date.now()) +jest.mock('../../lib/lib', () => ({ + getCurrentTime: () => mockCurrentTime(), +})) + +describe('WorkerJobQueueManager', () => { + let manager: WorkerJobQueueManager + + beforeEach(() => { + manager = new WorkerJobQueueManager() + mockCurrentTime.mockReturnValue(Date.now()) + }) + + afterEach(() => { + jest.clearAllMocks() + }) + + describe('queueJobWithoutResult', () => { + it('should queue a job in the high priority queue by default', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + + await manager.queueJobWithoutResult(queueName, jobName, jobData, undefined) + + // Verify job is retrievable + const job = 
await manager.getNextJob(queueName) + expect(job).not.toBeNull() + expect(job?.name).toBe(jobName) + expect(job?.data).toEqual(jobData) + }) + + it('should queue a job in the low priority queue when lowPriority is true', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + + await manager.queueJobWithoutResult(queueName, jobName, jobData, { lowPriority: true }) + + // Verify job is retrievable + const job = await manager.getNextJob(queueName) + expect(job).not.toBeNull() + expect(job?.name).toBe(jobName) + }) + + it('should prioritize high priority jobs over low priority jobs', async () => { + const queueName = 'testQueue' + + // Queue low priority job first + await manager.queueJobWithoutResult(queueName, 'lowPriorityJob', { priority: 'low' }, { lowPriority: true }) + + // Queue high priority job second + await manager.queueJobWithoutResult(queueName, 'highPriorityJob', { priority: 'high' }, undefined) + + // First job retrieved should be high priority + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe('highPriorityJob') + + // Second job retrieved should be low priority + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe('lowPriorityJob') + }) + }) + + describe('queueJobAndWrapResult', () => { + it('should return a WorkerJob with complete and getTimings promises', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + const now = Date.now() + + const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now) + + expect(workerJob).toHaveProperty('complete') + expect(workerJob).toHaveProperty('getTimings') + expect(workerJob.complete).toBeInstanceOf(Promise) + expect(workerJob.getTimings).toBeInstanceOf(Promise) + }) + + it('should resolve complete promise with result when job finishes successfully', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + const now = Date.now() + const expectedResult = { success: true } + + const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now) + + // Get the job from queue + const job = await manager.getNextJob(queueName) + expect(job).not.toBeNull() + + // Simulate job completion + const startedTime = now + 100 + const finishedTime = now + 200 + await manager.jobFinished(job!.id, startedTime, finishedTime, null, expectedResult) + + // Wait for the deferred callback + await waitTime(10) + + // Verify result + const result = await workerJob.complete + expect(result).toEqual(expectedResult) + }) + + it('should reject complete promise when job finishes with error', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + const now = Date.now() + + const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now) + + // Add catch handler to avoid unhandled rejection + workerJob.complete.catch(() => { + // Expected rejection + }) + + // Get the job from queue + const job = await manager.getNextJob(queueName) + expect(job).not.toBeNull() + + // Simulate job failure + const startedTime = now + 100 + const finishedTime = now + 200 + await manager.jobFinished(job!.id, startedTime, finishedTime, 'Job failed', null) + + // Wait for the deferred callback + await waitTime(10) + + // Verify error - the error message is wrapped in an Error object + await expect(workerJob.complete).rejects.toBeInstanceOf(Error) + }) + + it('should 
resolve getTimings promise with correct timing information', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + const queueTime = 1000 + const startedTime = 1100 + const finishedTime = 1200 + const completedTime = 1250 + + mockCurrentTime.mockReturnValue(completedTime) + + const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, queueTime) + + // Get the job from queue + const job = await manager.getNextJob(queueName) + expect(job).not.toBeNull() + + // Simulate job completion + await manager.jobFinished(job!.id, startedTime, finishedTime, null, { result: 'ok' }) + + // Wait for the deferred callback + await waitTime(10) + + // Verify timings + const timings = await workerJob.getTimings + expect(timings.queueTime).toBe(queueTime) + expect(timings.startedTime).toBe(startedTime) + expect(timings.finishedTime).toBe(finishedTime) + expect(timings.completedTime).toBe(completedTime) + }) + }) + + describe('getNextJob', () => { + it('should return null when no jobs are queued', async () => { + const job = await manager.getNextJob('emptyQueue') + expect(job).toBeNull() + }) + + it('should return jobs in FIFO order within same priority', async () => { + const queueName = 'testQueue' + + await manager.queueJobWithoutResult(queueName, 'job1', { order: 1 }, undefined) + await manager.queueJobWithoutResult(queueName, 'job2', { order: 2 }, undefined) + await manager.queueJobWithoutResult(queueName, 'job3', { order: 3 }, undefined) + + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe('job1') + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe('job2') + + const thirdJob = await manager.getNextJob(queueName) + expect(thirdJob?.name).toBe('job3') + + const noJob = await manager.getNextJob(queueName) + expect(noJob).toBeNull() + }) + }) + + describe('waitForNextJob', () => { + it('should resolve immediately if jobs are already queued', async () => { + const queueName = 'testQueue' + + await manager.queueJobWithoutResult(queueName, 'existingJob', {}, undefined) + + // Should resolve without waiting + await expect(manager.waitForNextJob(queueName)).resolves.toBeUndefined() + }) + + it('should wait for a job to be queued', async () => { + const queueName = 'testQueue' + + // Start waiting for a job + const waitPromise = manager.waitForNextJob(queueName) + + // Queue a job after a short delay + setTimeout(async () => { + await manager.queueJobWithoutResult(queueName, 'newJob', {}, undefined) + }, 10) + + // Wait should resolve once job is queued + await expect(waitPromise).resolves.toBeUndefined() + }) + + it('should reject old worker when new worker starts waiting', async () => { + const queueName = 'testQueue' + + // First worker starts waiting + const firstWaitPromise = manager.waitForNextJob(queueName) + + // Add catch handler to prevent unhandled rejection warning + firstWaitPromise.catch(() => { + // Expected rejection + }) + + // Second worker starts waiting, should reject first + const secondWaitPromise = manager.waitForNextJob(queueName) + + // Wait for deferred rejection + await waitTime(10) + + // First worker should be rejected + await expect(firstWaitPromise).rejects.toThrow('new workerThread, replacing the old') + + // Queue a job for second worker + await manager.queueJobWithoutResult(queueName, 'job', {}, undefined) + + // Wait for deferred notification + await waitTime(10) + + // Second worker should resolve + await 
expect(secondWaitPromise).resolves.toBeUndefined() + }) + }) + + describe('interruptJobStream', () => { + it('should resolve waiting worker', async () => { + const queueName = 'testQueue' + + // Start waiting for a job + const waitPromise = manager.waitForNextJob(queueName) + + // Interrupt the queue + await manager.interruptJobStream(queueName) + + // Wait for deferred resolution + await waitTime(10) + + // Wait should resolve + await expect(waitPromise).resolves.toBeUndefined() + }) + + it('should push null job if no worker is waiting', async () => { + const queueName = 'testQueue' + + // Interrupt without any worker waiting + await manager.interruptJobStream(queueName) + + // Next worker should get null immediately (handled in getNextJob) + // But waitForNextJob should return immediately as there's a null job in queue + await expect(manager.waitForNextJob(queueName)).resolves.toBeUndefined() + }) + }) + + describe('rejectAllRunning', () => { + it('should reject all running jobs with error', async () => { + const queueName = 'testQueue' + + // Queue multiple jobs + const job1 = manager.queueJobAndWrapResult(queueName, 'job1', {}, Date.now()) + const job2 = manager.queueJobAndWrapResult(queueName, 'job2', {}, Date.now()) + + // Get jobs from queue (marks them as running) + await manager.getNextJob(queueName) + await manager.getNextJob(queueName) + + // Reject all running + manager.rejectAllRunning() + + // Both jobs should be rejected + await expect(job1.complete).rejects.toThrow('Thread closed') + await expect(job2.complete).rejects.toThrow('Thread closed') + }) + }) + + describe('debounce', () => { + it('should skip queueing duplicate job when debounce is enabled', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue first job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Queue identical job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + 1001) + + // Only one job should be in the queue + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + }) + + it('should allow queueing different job names even with debounce', async () => { + const queueName = 'testQueue' + const jobData = { foo: 'bar' } + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + await manager.queueJobWithoutResult(queueName, 'job1', jobData, { debounce: 1000 }) + await manager.queueJobWithoutResult(queueName, 'job2', jobData, { debounce: 1000 }) + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + 1001) + + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe('job1') + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe('job2') + }) + + it('should allow queueing same job name with different data even with debounce', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + await manager.queueJobWithoutResult(queueName, jobName, { value: 1 }, { debounce: 1000 }) + await manager.queueJobWithoutResult(queueName, jobName, { value: 2 }, { 
debounce: 1000 }) + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + 1001) + + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + expect(firstJob?.data).toEqual({ value: 1 }) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe(jobName) + expect(secondJob?.data).toEqual({ value: 2 }) + }) + + it('should queue job without debounce flag even if identical job exists', async () => { + const queueName = 'testQueue' + const jobName = 'testJob' + const jobData = { foo: 'bar' } + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Queue without debounce - should still be added (and available immediately) + await manager.queueJobWithoutResult(queueName, jobName, jobData, undefined) + + // The non-debounced job should be available immediately + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // The debounced job is not ready yet + const noJobYet = await manager.getNextJob(queueName) + expect(noJobYet).toBeNull() + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + 1001) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe(jobName) + }) + + it('should allow re-queueing job after original is consumed', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + + // Queue first job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Consume the job - need to wait for debounce time first + mockCurrentTime.mockReturnValue(Date.now() + 1001) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // Queue same job again with debounce - should work since original was consumed + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + mockCurrentTime.mockReturnValue(Date.now() + 2002) + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe(jobName) + }) + + it('should debounce across priority queues - high to low', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + + // Queue in high priority with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Try to queue identical in low priority with debounce - should be debounced + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true }) + + // Wait for debounce time + mockCurrentTime.mockReturnValue(Date.now() + 1001) + + // Only one job should exist (still in high priority) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + }) + + it('should debounce across priority queues - low to high with priority upgrade', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + + // Queue in low priority with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true }) + + // Try to queue identical in high priority with debounce - should upgrade existing job + await manager.queueJobWithoutResult(queueName, 
jobName, jobData, { debounce: 1000 }) + + // Wait for debounce time + mockCurrentTime.mockReturnValue(Date.now() + 1001) + + // Only one job should exist (upgraded to high priority) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + }) + + it('should prioritize upgraded job over other low priority jobs', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + + // Queue a low priority job first + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true }) + + // Queue another low priority job + await manager.queueJobWithoutResult( + queueName, + 'otherLowPriorityJob', + { other: true }, + { lowPriority: true } + ) + + // Upgrade the first job to high priority + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Wait for debounce time + mockCurrentTime.mockReturnValue(Date.now() + 1001) + + // First job retrieved should be the upgraded one (now high priority) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // Second should be the other low priority job + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe('otherLowPriorityJob') + }) + + it('should respect debounce timing - getNextJob ignores jobs before notBefore', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Job should not be available yet (before notBefore) + const jobBefore = await manager.getNextJob(queueName) + expect(jobBefore).toBeNull() + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + + // Job should now be available + const jobAfter = await manager.getNextJob(queueName) + expect(jobAfter?.name).toBe(jobName) + }) + + it('should respect debounce timing - waitForNextJob considers notBefore', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // waitForNextJob should wait (no ready jobs) - start waiting + const waitPromise = manager.waitForNextJob(queueName) + + // Advance time past debounce - this should trigger the timer and resolve the wait + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + + // Wait a bit for the timer to fire + await waitTime(debounceTime + 50) + + // The wait should have resolved + await expect(waitPromise).resolves.toBeUndefined() + + // And the job should now be available + const job = await manager.getNextJob(queueName) + expect(job?.name).toBe(jobName) + }) + + it('should allow duplicate job after debounce time expires and job is consumed', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue first job with debounce + await 
manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Advance time past debounce and consume the job + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // Queue same job again - should work since original was consumed + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Advance time past second debounce + mockCurrentTime.mockReturnValue(startTime + 2 * debounceTime + 2) + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe(jobName) + }) + + it('should extend debounce window (notBefore) on subsequent debounce calls', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue first job with debounce - notBefore = startTime + 100 + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // After 50ms, queue duplicate - should extend notBefore to startTime + 150 + mockCurrentTime.mockReturnValue(startTime + 50) + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // At startTime + 110, job should NOT be ready (extended to 150) + mockCurrentTime.mockReturnValue(startTime + 110) + const jobTooEarly = await manager.getNextJob(queueName) + expect(jobTooEarly).toBeNull() + + // At startTime + 151, job should be ready + mockCurrentTime.mockReturnValue(startTime + 151) + const jobReady = await manager.getNextJob(queueName) + expect(jobReady?.name).toBe(jobName) + + // Should only be one job + const noMoreJobs = await manager.getNextJob(queueName) + expect(noMoreJobs).toBeNull() + }) + + it('should process non-debounced jobs immediately even when debounced jobs are waiting', async () => { + const queueName = 'testQueue' + const debounceTime = 1000 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue debounced job first + await manager.queueJobWithoutResult( + queueName, + 'debouncedJob', + { debounced: true }, + { debounce: debounceTime } + ) + + // Queue non-debounced job second + await manager.queueJobWithoutResult(queueName, 'immediateJob', { immediate: true }, undefined) + + // Non-debounced job should be available immediately + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe('immediateJob') + + // Debounced job should not be available yet + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + + // After debounce time, debounced job should be available + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + const thirdJob = await manager.getNextJob(queueName) + expect(thirdJob?.name).toBe('debouncedJob') + }) + }) + + describe('multiple queues', () => { + it('should maintain separate queues for different queue names', async () => { + const queue1 = 'queue1' + const queue2 = 'queue2' + + await manager.queueJobWithoutResult(queue1, 'jobInQueue1', { queue: 1 }, undefined) + await manager.queueJobWithoutResult(queue2, 'jobInQueue2', { queue: 2 }, undefined) + + const jobFromQueue1 = await manager.getNextJob(queue1) + expect(jobFromQueue1?.name).toBe('jobInQueue1') + + const jobFromQueue2 = await manager.getNextJob(queue2) + expect(jobFromQueue2?.name).toBe('jobInQueue2') + + // Each queue should be empty now + expect(await manager.getNextJob(queue1)).toBeNull() + expect(await manager.getNextJob(queue2)).toBeNull() + }) + + it('should not mix jobs between different queues', async () => { + const queue1 = 'queue1' + const queue2 = 'queue2' + + await manager.queueJobWithoutResult(queue1, 'job1', {}, undefined) + await manager.queueJobWithoutResult(queue1, 'job2', {}, undefined) + + // Queue2 should have no jobs + expect(await manager.getNextJob(queue2)).toBeNull() + + // Queue1 should have both jobs + expect(await manager.getNextJob(queue1)).not.toBeNull() + expect(await manager.getNextJob(queue1)).not.toBeNull() + }) + }) +})
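Editor's note: the tests above pin down the debounce contract of the new queue: identical (name, data) pairs inside the window collapse into one job, each duplicate pushes the notBefore time further out, a high-priority duplicate upgrades a low-priority original, and jobs queued without the option bypass debouncing entirely. A compact usage sketch, assuming a WorkerJobQueueManager instance as defined in jobQueue.ts below; queue and job names are arbitrary examples:

// Hedged usage sketch of the debounce contract.
import { WorkerJobQueueManager } from './jobQueue' // assumed relative path, as used from worker.ts

async function debounceExample(manager: WorkerJobQueueManager): Promise<void> {
	// Two identical payloads inside the window collapse into one queued job,
	// and the second call extends the job's notBefore time
	await manager.queueJobWithoutResult('studio0', 'recalc', { studio: 'A' }, { debounce: 500 })
	await manager.queueJobWithoutResult('studio0', 'recalc', { studio: 'A' }, { debounce: 500 })

	// A different payload is a different job, debounced independently
	await manager.queueJobWithoutResult('studio0', 'recalc', { studio: 'B' }, { debounce: 500 })

	// Omitting the option queues immediately, even if an identical debounced job is waiting
	await manager.queueJobWithoutResult('studio0', 'recalc', { studio: 'A' }, undefined)
}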
diff --git a/meteor/server/worker/jobQueue.ts b/meteor/server/worker/jobQueue.ts new file mode 100644 index 0000000000..459116cf10 --- /dev/null +++ b/meteor/server/worker/jobQueue.ts @@ -0,0 +1,410 @@ +import { UserError } from '@sofie-automation/corelib/dist/error' +import { MetricsCounter } from '@sofie-automation/corelib/dist/prometheus' +import type { JobSpec } from '@sofie-automation/job-worker/dist/main' +import { Meteor } from 'meteor/meteor' +import type { JobTimings, WorkerJob } from './worker' +import type { Time } from '@sofie-automation/shared-lib/dist/lib/lib' +import type { QueueJobOptions } from '@sofie-automation/job-worker/dist/jobs' +import { getRandomString } from '@sofie-automation/corelib/dist/lib' +import { logger } from '../logging' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { getCurrentTime } from '../lib/lib' +import _ from 'underscore' + +const metricsQueueTotalCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_queue_total', + help: 'Number of jobs put into each worker job queues', + labelNames: ['threadName'], +}) +const metricsQueueSuccessCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_success', + help: 'Number of successful jobs from each worker', + labelNames: ['threadName'], +}) +const metricsQueueErrorsCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_queue_errors', + help: 'Number of failed jobs from each worker', + labelNames: ['threadName'], +}) + +interface JobQueue { + // A null job is an interruption of the queue; to ensure that something waiting is woken up + jobsHighPriority: Array<JobEntry | null> + jobsLowPriority: Array<JobEntry> + + /** Notify that there is a job waiting (aka worker is long-polling) */ + notifyWorker: PromiseWithResolvers<void> | null + + metricsTotal: MetricsCounter.Internal + metricsSuccess: MetricsCounter.Internal + metricsErrors: MetricsCounter.Internal +} + +type JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, result: any) => void + +interface RunningJob { + queueName: string + completionHandler: JobCompletionHandler | null +} + +interface JobEntry { + spec: JobSpec + /** The completionHandler is called when a job is completed. null implies "shoot-and-forget" */ + completionHandler: JobCompletionHandler | null + /** If set, the job should not be executed before this time (used for debouncing) */ + notBefore?: Time + /** Timer handle for waking up workers when this job becomes ready */ + debounceTimer?: NodeJS.Timeout +} + +export class WorkerJobQueueManager { + readonly #queues = new Map<string, JobQueue>() + /** Contains all jobs that are currently being executed by a Worker.
*/ + readonly #runningJobs = new Map<string, RunningJob>() + + #getOrCreateQueue(queueName: string): JobQueue { + let queue = this.#queues.get(queueName) + if (!queue) { + queue = { + jobsHighPriority: [], + jobsLowPriority: [], + notifyWorker: null, + metricsTotal: metricsQueueTotalCounter.labels(queueName), + metricsSuccess: metricsQueueSuccessCounter.labels(queueName), + metricsErrors: metricsQueueErrorsCounter.labels(queueName), + } + this.#queues.set(queueName, queue) + } + return queue + } + + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + async jobFinished(id: string, startedTime: number, finishedTime: number, err: any, result: any): Promise<void> { + const job = this.#runningJobs.get(id) + if (job) { + this.#runningJobs.delete(id) + + // Update metrics + const queue = this.#queues.get(job.queueName) + if (queue) { + if (err) { + queue.metricsErrors.inc() + } else { + queue.metricsSuccess.inc() + } + } + + if (job.completionHandler) { + const userError = err ? UserError.tryFromJSON(err) || new Error(err) : undefined + job.completionHandler(startedTime, finishedTime, userError, result) + } + } + } + /** This is called by each Worker Thread, when it is idle and wants another job */ + async waitForNextJob(queueName: string): Promise<void> { + const queue = this.#getOrCreateQueue(queueName) + const now = getCurrentTime() + + // Helper to check if a job is ready to execute + const isJobReady = (job: JobEntry | null): boolean => { + if (!job) return true // null jobs (interrupts) are always "ready" + return !job.notBefore || job.notBefore <= now + } + + // Check if there is a ready job waiting + if (queue.jobsHighPriority.some(isJobReady) || queue.jobsLowPriority.some(isJobReady)) { + return + } + // No ready job, do a long-poll + + // Already a worker waiting? Reject it, as we replace it + if (queue.notifyWorker) { + const oldNotify = queue.notifyWorker + + Meteor.defer(() => { + try { + // Notify the worker in the background + oldNotify.reject(new Error('new workerThread, replacing the old')) + } catch (_e) { + // Ignore + } + }) + } + + // Wait to be notified about a job + queue.notifyWorker = Promise.withResolvers<void>() + return queue.notifyWorker.promise + } + /** This is called by each Worker Thread, when it thinks there is a job to execute */ + async getNextJob(queueName: string): Promise<JobSpec | null> { + const queue = this.#getOrCreateQueue(queueName) + const now = getCurrentTime() + + // Helper to check if a job is ready to execute + const isJobReady = (job: JobEntry | null): boolean => { + if (!job) return true // null jobs (interrupts) are always "ready" + return !job.notBefore || job.notBefore <= now + } + + // Prefer high priority jobs - find first ready job + const highPriorityIndex = queue.jobsHighPriority.findIndex(isJobReady) + if (highPriorityIndex !== -1) { + const job = queue.jobsHighPriority.splice(highPriorityIndex, 1)[0] + if (job) { + this.#runningJobs.set(job.spec.id, { + queueName, + completionHandler: job.completionHandler, + }) + return job.spec + } + // null job (interrupt) - return null + return null + } + + // Check low priority jobs + const lowPriorityIndex = queue.jobsLowPriority.findIndex(isJobReady) + if (lowPriorityIndex !== -1) { + const job = queue.jobsLowPriority.splice(lowPriorityIndex, 1)[0] + this.#runningJobs.set(job.spec.id, { + queueName, + completionHandler: job.completionHandler, + }) + return job.spec + } + + // No ready job + return null + } + /** This is called when something restarts, to ensure the `queue.notifyWorker` doesn't get stuck */ + async interruptJobStream(queueName: string): Promise<void> { + // Check if there is a job waiting: + const queue = this.#getOrCreateQueue(queueName) + if (queue.notifyWorker) { + const oldNotify = queue.notifyWorker + queue.notifyWorker = null + + Meteor.defer(() => { + try { + // Notify the worker in the background + oldNotify.resolve() + } catch (_e) { + // Ignore + } + }) + } else { + // There should be a worker waiting; its `getNextJob` might not have reached us yet + // So we push a `null` job at the start so that it interrupts immediately + queue.jobsHighPriority.unshift(null) + } + } + + async queueJobWithoutResult( + queueName: string, + jobName: string, + jobData: unknown, + options: QueueJobOptions | undefined + ): Promise<void> { + this.#queueJobInner( + queueName, + { + spec: { + id: getRandomString(), + name: jobName, + data: jobData, + }, + completionHandler: null, + }, + options + ) + } + + queueJobAndWrapResult( + queueName: string, + jobName: string, + jobData: unknown, + now: Time, + options?: QueueJobOptions + ): WorkerJob { + const jobId = getRandomString() + const { result, completionHandler } = generateCompletionHandler(jobId, now) + + this.#queueJobInner( + queueName, + { + spec: { + id: jobId, + name: jobName, + data: jobData, + }, + completionHandler: completionHandler, + }, + options + ) + + return result + } + + #queueJobInner(queueName: string, jobToQueue: JobEntry, options?: QueueJobOptions): void { + const queue = this.#getOrCreateQueue(queueName) + const isLowPriority = options?.lowPriority ??
false + const debounceTime = options?.debounce + + // Debounce: check if an identical job is already queued in either priority queue + if (debounceTime) { + const matchJob = (job: JobEntry | null): job is JobEntry => + job !== null && job.spec.name === jobToQueue.spec.name && _.isEqual(job.spec.data, jobToQueue.spec.data) + + // Check high priority queue + const existingHighPriorityIndex = queue.jobsHighPriority.findIndex(matchJob) + if (existingHighPriorityIndex !== -1) { + // Job exists in high priority - just extend the notBefore time + const existingJob = queue.jobsHighPriority[existingHighPriorityIndex] as JobEntry + existingJob.notBefore = getCurrentTime() + debounceTime + + logger.debug(`Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (extended)`) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } + + // Check low priority queue + const existingLowPriorityIndex = queue.jobsLowPriority.findIndex(matchJob) + if (existingLowPriorityIndex !== -1) { + const existingJob = queue.jobsLowPriority[existingLowPriorityIndex] + if (isLowPriority) { + // Job exists in low priority, new job is also low priority - just extend notBefore + existingJob.notBefore = getCurrentTime() + debounceTime + + logger.debug(`Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (extended)`) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } else { + // Job exists in low priority, but new job is high priority - upgrade it + queue.jobsLowPriority.splice(existingLowPriorityIndex, 1) + existingJob.notBefore = getCurrentTime() + debounceTime + queue.jobsHighPriority.push(existingJob) + logger.debug( + `Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (upgraded to high priority)` + ) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } + } + + // No existing job found, set notBefore on the new job + jobToQueue.notBefore = getCurrentTime() + debounceTime + } + + // Queue the job based on priority + if (isLowPriority) { + queue.jobsLowPriority.push(jobToQueue) + } else { + queue.jobsHighPriority.push(jobToQueue) + } + + queue.metricsTotal.inc() + + // If there is a worker waiting to pick up a job + if (jobToQueue.notBefore) { + // Schedule a wakeup for when the debounce time expires + this.#scheduleDebounceWakeup(queue, jobToQueue) + } else { + // Ensure a waiting worker is notified + this.#notifyWorker(queue) + } + } + + #scheduleDebounceWakeup(queue: JobQueue, job: JobEntry): void { + // Clear any existing timer for this job to avoid accumulating timers + if (job.debounceTimer) { + clearTimeout(job.debounceTimer) + delete job.debounceTimer + } + + if (job.notBefore) { + const delay = Math.max(0, job.notBefore - getCurrentTime()) + job.debounceTimer = setTimeout(() => { + delete job.debounceTimer + // Ensure a waiting worker is notified + this.#notifyWorker(queue) + }, delay) + } + } + + #notifyWorker(queue: JobQueue): void { + if (queue.notifyWorker) { + const notify = queue.notifyWorker + + // Worker is about to be notified, so clear the handle: + queue.notifyWorker = null + Meteor.defer(() => { + try { + // Notify the worker in the background + notify.resolve() + } catch (e) { + // Queue failed + logger.error(`Error in notifyWorker: ${stringifyError(e)}`) + } + }) + } + } + + rejectAllRunning(): void { + const now = getCurrentTime() + for (const job of this.#runningJobs.values()) { + const queue = this.#queues.get(job.queueName) + if (queue) queue.metricsErrors.inc() + + if (job.completionHandler) { + 
job.completionHandler(now, now, new Error('Thread closed'), null) + } + } + this.#runningJobs.clear() + } +} + +function generateCompletionHandler( + jobId: string, + queueTime: Time +): { result: WorkerJob; completionHandler: JobCompletionHandler } { + // logger.debug(`Queued job #${job.id} of "${name}" to "${queue.name}"`) + + const complete = Promise.withResolvers<any>() + const getTimings = Promise.withResolvers<JobTimings>() + + // TODO: Worker - timeouts + + /** The handler is called upon completion */ + const completionHandler: JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, res: any) => { + try { + if (err) { + logger.debug(`Completed job #${jobId} with error`) + complete.reject(err) + } else { + logger.debug(`Completed job #${jobId} with success`) + complete.resolve(res) + } + } catch (e) { + logger.error(`Job completion failed: ${stringifyError(e)}`) + } + + try { + getTimings.resolve({ + queueTime, + startedTime, + finishedTime, + completedTime: getCurrentTime(), + }) + } catch (e) { + logger.error(`Job timing resolve failed: ${stringifyError(e)}`) + } + } + + return { + result: { + complete: complete.promise, + getTimings: getTimings.promise, + }, + completionHandler, + } +}
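Editor's note: to make the producer/worker handshake concrete, here is a hedged end-to-end sketch against the class above. The Meteor-side producer queues a job and keeps the wrapped promises; the worker thread long-polls, claims the job spec, executes it, and reports back through jobFinished, which settles both promises. Queue and job names are arbitrary examples, and the threadedclass wiring that worker.ts below sets up is assumed away:

// Hedged lifecycle sketch; both roles are played in-process for illustration.
import { WorkerJobQueueManager } from './jobQueue' // assumed relative path, as in worker.ts

async function lifecycleExample(): Promise<void> {
	const manager = new WorkerJobQueueManager()

	// Producer side: queue a job and hold on to its result promises
	const job = manager.queueJobAndWrapResult('queue0', 'doWork', { input: 1 }, Date.now())

	// Worker side: long-poll until something is ready, then claim it
	await manager.waitForNextJob('queue0')
	const spec = await manager.getNextJob('queue0')

	if (spec) {
		// ...execute the job, then report back; this settles job.complete and job.getTimings
		await manager.jobFinished(spec.id, Date.now(), Date.now(), null, { ok: true })
	}

	console.log(await job.complete, await job.getTimings)
}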
diff --git a/meteor/server/worker/worker.ts b/meteor/server/worker/worker.ts index cab5db533f..6a813e0a3a 100644 --- a/meteor/server/worker/worker.ts +++ b/meteor/server/worker/worker.ts @@ -6,10 +6,8 @@ import { logger } from '../logging' import { Meteor } from 'meteor/meteor' import { FORCE_CLEAR_CACHES_JOB, IS_INSPECTOR_ENABLED } from '@sofie-automation/corelib/dist/worker/shared' import { threadedClass, Promisify, ThreadedClassManager } from 'threadedclass' -import type { JobSpec } from '@sofie-automation/job-worker/dist/main' import type { IpcJobWorker } from '@sofie-automation/job-worker/dist/ipc' import { getRandomString } from '@sofie-automation/corelib/dist/lib' -import type { Time } from '@sofie-automation/shared-lib/dist/lib/lib' import { getCurrentTime } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' @@ -21,210 +19,15 @@ import { LogEntry } from 'winston' import { initializeWorkerStatus, setWorkerStatus } from './workerStatus' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { UserActionsLog } from '../collections' -import { MetricsCounter } from '@sofie-automation/corelib/dist/prometheus' import { isInTestWrite } from '../security/securityVerify' -import { UserError } from '@sofie-automation/corelib/dist/error' +import { QueueJobOptions } from '@sofie-automation/job-worker/dist/jobs' +import { WorkerJobQueueManager } from './jobQueue' const FREEZE_LIMIT = 1000 // how long to wait for a response to a Ping const RESTART_TIMEOUT = 30000 // how long to wait for a restart to complete before throwing an error const KILL_TIMEOUT = 30000 // how long to wait for a thread to terminate before throwing an error -interface JobEntry { - spec: JobSpec - /** The completionHandler is called when a job is completed. null implies "shoot-and-forget" */ - completionHandler: JobCompletionHandler | null -} - -const metricsQueueTotalCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_queue_total', - help: 'Number of jobs put into each worker job queues', - labelNames: ['threadName'], -}) -const metricsQueueSuccessCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_success', - help: 'Number of successful jobs from each worker', - labelNames: ['threadName'], -}) -const metricsQueueErrorsCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_queue_errors', - help: 'Number of failed jobs from each worker', - labelNames: ['threadName'], -}) - -interface JobQueue { - jobs: Array - /** Notify that there is a job waiting (aka worker is long-polling) */ - notifyWorker: PromiseWithResolvers | null - - metricsTotal: MetricsCounter.Internal - metricsSuccess: MetricsCounter.Internal - metricsErrors: MetricsCounter.Internal -} - -type JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, result: any) => void - -interface RunningJob { - queueName: string - completionHandler: JobCompletionHandler | null -} - -const queues = new Map() -/** Contains all jobs that are currently being executed by a Worker. */ -const runningJobs = new Map() - -function getOrCreateQueue(queueName: string): JobQueue { - let queue = queues.get(queueName) - if (!queue) { - queue = { - jobs: [], - notifyWorker: null, - metricsTotal: metricsQueueTotalCounter.labels(queueName), - metricsSuccess: metricsQueueSuccessCounter.labels(queueName), - metricsErrors: metricsQueueErrorsCounter.labels(queueName), - } - queues.set(queueName, queue) - } - return queue -} - -async function jobFinished( - id: string, - startedTime: number, - finishedTime: number, - err: any, - result: any -): Promise { - const job = runningJobs.get(id) - if (job) { - runningJobs.delete(id) - - // Update metrics - const queue = queues.get(job.queueName) - if (queue) { - if (err) { - queue.metricsErrors.inc() - } else { - queue.metricsSuccess.inc() - } - } - - if (job.completionHandler) { - const userError = err ? UserError.tryFromJSON(err) || new Error(err) : undefined - job.completionHandler(startedTime, finishedTime, userError, result) - } - } -} -/** This is called by each Worker Thread, when it is idle and wants another job */ -async function waitForNextJob(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - if (queue.jobs.length > 0) { - return - } - // No job ready, do a long-poll - - // Already a worker waiting?
Reject it, as we replace it - if (queue.notifyWorker) { - const oldNotify = queue.notifyWorker - - Meteor.defer(() => { - try { - // Notify the worker in the background - oldNotify.reject(new Error('new workerThread, replacing the old')) - } catch (_e) { - // Ignore - } - }) - } - - // Wait to be notified about a job - queue.notifyWorker = Promise.withResolvers() - return queue.notifyWorker.promise -} -/** This is called by each Worker Thread, when it thinks there is a job to execute */ -async function getNextJob(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - const job = queue.jobs.shift() - if (job) { - // If there is a completion handler, register it for execution - runningJobs.set(job.spec.id, { - queueName, - completionHandler: job.completionHandler, - }) - - // Pass the job to the worker - return job.spec - } - - // No job ready - return null -} -/** This is called by each Worker Thread, when it is idle and wants another job */ -async function interruptJobStream(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - if (queue.notifyWorker) { - const oldNotify = queue.notifyWorker - queue.notifyWorker = null - - Meteor.defer(() => { - try { - // Notify the worker in the background - oldNotify.resolve() - } catch (_e) { - // Ignore - } - }) - } else { - queue.jobs.unshift(null) - } -} -async function queueJobWithoutResult(queueName: string, jobName: string, jobData: unknown): Promise { - queueJobInner(queueName, { - spec: { - id: getRandomString(), - name: jobName, - data: jobData, - }, - completionHandler: null, - }) -} - -function queueJobInner(queueName: string, jobToQueue: JobEntry): void { - // Put the job at the end of the queue: - const queue = getOrCreateQueue(queueName) - queue.jobs.push(jobToQueue) - queue.metricsTotal.inc() - - // If there is a worker waiting to pick up a job - if (queue.notifyWorker) { - const notify = queue.notifyWorker - - // Worker is about to be notified, so clear the handle: - queue.notifyWorker = null - Meteor.defer(() => { - try { - // Notify the worker in the background - notify.resolve() - } catch (e) { - // Queue failed - logger.error(`Error in notifyWorker: ${stringifyError(e)}`) - } - }) - } -} - -function queueJobAndWrapResult(queueName: string, job: JobSpec, now: Time): WorkerJob { - const { result, completionHandler } = generateCompletionHandler(job.id, now) - - queueJobInner(queueName, { - spec: job, - completionHandler: completionHandler, - }) - - return result -} +const queueManager = new WorkerJobQueueManager() async function fastTrackTimeline(newTimeline: TimelineComplete): Promise { const studio = await fetchStudioLight(newTimeline._id) @@ -300,11 +103,11 @@ Meteor.startup(async () => { 'IpcJobWorker', [ workerId, - jobFinished, - interruptJobStream, - waitForNextJob, - getNextJob, - queueJobWithoutResult, + queueManager.jobFinished.bind(queueManager), + queueManager.interruptJobStream.bind(queueManager), + queueManager.waitForNextJob.bind(queueManager), + queueManager.getNextJob.bind(queueManager), + queueManager.queueJobWithoutResult.bind(queueManager), logLine, fastTrackTimeline, !IS_INSPECTOR_ENABLED, @@ -343,16 +146,7 @@ Meteor.startup(async () => { 'thread_closed', Meteor.bindEnvironment(() => { // Thread closed, reject all jobs - const now = getCurrentTime() - for (const job of runningJobs.values()) { - const queue = queues.get(job.queueName) - if (queue) queue.metricsErrors.inc() - - if 
(job.completionHandler) { - job.completionHandler(now, now, new Error('Thread closed'), null) - } - } - runningJobs.clear() + queueManager.rejectAllRunning() setWorkerStatus(workerId, false, 'Closed').catch((e) => { logger.error(`Failed to update worker threads status: ${stringifyError(e)}`) @@ -407,41 +201,17 @@ export async function QueueForceClearAllCaches(studioIds: StudioId[]): Promise( jobName: T, studioId: StudioId, - jobParameters: Parameters[0] + jobParameters: Parameters[0], + options?: QueueJobOptions ): Promise>> { if (isInTestWrite()) throw new Meteor.Error(404, 'Should not be reachable during startup tests') if (!studioId) throw new Meteor.Error(500, 'Missing studioId') const now = getCurrentTime() - return queueJobAndWrapResult( - getStudioQueueName(studioId), - { - id: getRandomString(), - name: jobName, - data: jobParameters, - }, - now - ) + return queueManager.queueJobAndWrapResult(getStudioQueueName(studioId), jobName, jobParameters, now, options) } /** @@ -491,60 +254,5 @@ export async function QueueIngestJob( if (!studioId) throw new Meteor.Error(500, 'Missing studioId') const now = getCurrentTime() - return queueJobAndWrapResult( - getIngestQueueName(studioId), - { - id: getRandomString(), - name: jobName, - data: jobParameters, - }, - now - ) -} - -function generateCompletionHandler( - jobId: string, - queueTime: Time -): { result: WorkerJob; completionHandler: JobCompletionHandler } { - // logger.debug(`Queued job #${job.id} of "${name}" to "${queue.name}"`) - - const complete = Promise.withResolvers() - const getTimings = Promise.withResolvers() - - // TODO: Worker - timeouts - - /** The handler is called upon a completion */ - const completionHandler: JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, res: any) => { - try { - if (err) { - logger.debug(`Completed job #${jobId} with error`) - complete.reject(err) - } else { - logger.debug(`Completed job #${jobId} with success`) - complete.resolve(res) - } - } catch (e) { - logger.error(`Job completion failed: ${stringifyError(e)}`) - } - - try { - getTimings.resolve({ - queueTime, - startedTime, - - finishedTime, - completedTime: getCurrentTime(), - }) - } catch (e) { - logger.error(`Job timing resolve failed: ${stringifyError(e)}`) - } - } - - return { - result: { - complete: complete.promise, - getTimings: getTimings.promise, - }, - completionHandler, - } + return queueManager.queueJobAndWrapResult(getIngestQueueName(studioId), jobName, jobParameters, now) } diff --git a/meteor/tsconfig-base.json b/meteor/tsconfig-base.json index 18b23e7517..582e6ca72b 100644 --- a/meteor/tsconfig-base.json +++ b/meteor/tsconfig-base.json @@ -14,8 +14,16 @@ // "./node_modules/@types/meteor/*", "./.meteor/local/types/packages.d.ts" ] - } + }, + "noEmit": true }, "include": ["client/**/*", "server/**/*", "lib/**/*", "__mocks__/**/*", "tslint-rules/**/*"], - "exclude": ["node_modules", "**/.coverage/**/*"] + "exclude": ["node_modules", "**/.coverage/**/*"], + "references": [ + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../blueprints-integration/tsconfig.build.json" }, + { "path": "../corelib/tsconfig.build.json" }, + { "path": "../meteor-lib/tsconfig.build.json" }, + { "path": "../job-worker/tsconfig.build.json" } + ] } diff --git a/package.json b/package.json index 0244cec7d6..25e5fff13e 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "test:packages": "cd packages && run test", "lint:packages": "cd packages && run lint", "unit:packages": "cd packages && run unit", - 
"check-types:meteor": "cd meteor && run check-types", + "check-types": "cd packages && run build", "test:meteor": "cd meteor && run test", "lint:meteor": "cd meteor && yarn lint", "unit:meteor": "cd meteor && yarn unit", @@ -33,7 +33,7 @@ "meteor": "cd meteor && meteor", "docs:serve": "cd packages && run docs:serve", "reset": "node scripts/reset.mjs", - "test-all": "yarn install && run install-and-build && run check-types:meteor && run lint:packages && run lint:meteor && run test:packages && run test:meteor" + "test-all": "yarn install && run install-and-build && run check-types && run lint:packages && run lint:meteor && run test:packages && run test:meteor" }, "devDependencies": { "concurrently": "^9.1.2", diff --git a/packages/blueprints-integration/package.json b/packages/blueprints-integration/package.json index 4e7ff4cb94..eedabf1dec 100644 --- a/packages/blueprints-integration/package.json +++ b/packages/blueprints-integration/package.json @@ -15,8 +15,6 @@ }, "homepage": "https://github.com/Sofie-Automation/sofie-core/blob/main/packages/blueprints-integration#readme", "scripts": { - "build": "run -T rimraf dist && run build:main", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/blueprints-integration/src/action.ts b/packages/blueprints-integration/src/action.ts index 209fa98385..41e34cb789 100644 --- a/packages/blueprints-integration/src/action.ts +++ b/packages/blueprints-integration/src/action.ts @@ -97,6 +97,9 @@ export interface IBlueprintActionManifest Promise<{ validationErrors: any } | void> + ) => Promise /** Generate adlib piece from ingest data */ getAdlibItem?: ( diff --git a/packages/blueprints-integration/src/context/onSetAsNextContext.ts b/packages/blueprints-integration/src/context/onSetAsNextContext.ts index 9e729ce402..6b35cf699d 100644 --- a/packages/blueprints-integration/src/context/onSetAsNextContext.ts +++ b/packages/blueprints-integration/src/context/onSetAsNextContext.ts @@ -1,5 +1,6 @@ import { IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -72,10 +73,16 @@ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContex /** Update a piecesInstance from the partInstance being set as Next */ updatePieceInstance(pieceInstanceId: string, piece: Partial): Promise - /** Update a partInstance */ + /** + * Update a partInstance + * @param part Which part to update + * @param props Properties of the Part itself + * @param instanceProps Properties of the PartInstance (runtime state) + */ updatePartInstance( part: 'current' | 'next', - props: Partial + props: Partial, + instanceProps?: Partial ): Promise /** diff --git a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts index 22af1b509f..26c3c5bfbd 100644 --- a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts +++ b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts @@ -1,6 +1,7 @@ import { ReadonlyDeep } from 'type-fest' import { IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -64,10 +65,16 @@ export interface IPartAndPieceActionContext { /** Update a piecesInstance */ updatePieceInstance(pieceInstanceId: string, piece: Partial): Promise - /** Update a partInstance */ + /** + * Update a partInstance + * @param 
diff --git a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts
index 22af1b509f..26c3c5bfbd 100644
--- a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts
+++ b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts
@@ -1,6 +1,7 @@
 import { ReadonlyDeep } from 'type-fest'
 import {
 	IBlueprintMutatablePart,
+	IBlueprintMutatablePartInstance,
 	IBlueprintPart,
 	IBlueprintPartInstance,
 	IBlueprintPiece,
@@ -64,10 +65,16 @@ export interface IPartAndPieceActionContext {
 	/** Update a piecesInstance */
 	updatePieceInstance(pieceInstanceId: string, piece: Partial<IBlueprintPiece>): Promise<IBlueprintPieceInstance>
 
-	/** Update a partInstance */
+	/**
+	 * Update a partInstance
+	 * @param part Which part to update
+	 * @param props Properties of the Part itself
+	 * @param instanceProps Properties of the PartInstance (runtime state)
+	 */
 	updatePartInstance(
 		part: 'current' | 'next',
-		props: Partial<IBlueprintMutatablePart>
+		props: Partial<IBlueprintMutatablePart>,
+		instanceProps?: Partial<IBlueprintMutatablePartInstance>
 	): Promise<IBlueprintPartInstance>
 
 	/** Inform core that a take out of the partinstance should be blocked until the specified time */
 	blockTakeUntil(time: Time | null): Promise<void>
diff --git a/packages/blueprints-integration/src/context/syncIngestChangesContext.ts b/packages/blueprints-integration/src/context/syncIngestChangesContext.ts
index e6917d443b..1f877ffd7b 100644
--- a/packages/blueprints-integration/src/context/syncIngestChangesContext.ts
+++ b/packages/blueprints-integration/src/context/syncIngestChangesContext.ts
@@ -1,6 +1,7 @@
 import type { IRundownUserContext } from './rundownContext.js'
 import type {
 	IBlueprintMutatablePart,
+	IBlueprintMutatablePartInstance,
 	IBlueprintPartInstance,
 	IBlueprintPiece,
 	IBlueprintPieceInstance,
@@ -37,8 +38,15 @@ export interface ISyncIngestUpdateToPartInstanceContext extends IRundownUserContext {
 	// /** Remove a ActionInstance */
 	// removeActionInstances(...actionInstanceIds: string[]): string[]
 
-	/** Update a partInstance */
-	updatePartInstance(props: Partial<IBlueprintMutatablePart>): IBlueprintPartInstance
+	/**
+	 * Update a partInstance
+	 * @param props Properties of the Part itself
+	 * @param instanceProps Properties of the PartInstance (runtime state)
+	 */
+	updatePartInstance(
+		props: Partial<IBlueprintMutatablePart>,
+		instanceProps?: Partial<IBlueprintMutatablePartInstance>
+	): IBlueprintPartInstance
 
 	/** Remove the partInstance. This is only valid when `playstatus: 'next'` */
 	removePartInstance(): void
diff --git a/packages/blueprints-integration/src/documents/partInstance.ts b/packages/blueprints-integration/src/documents/partInstance.ts
index 9f30ff1bfb..b6bee63be4 100644
--- a/packages/blueprints-integration/src/documents/partInstance.ts
+++ b/packages/blueprints-integration/src/documents/partInstance.ts
@@ -1,10 +1,25 @@
 import type { Time } from '../common.js'
 import type { IBlueprintPartDB } from './part.js'
+import type { ITranslatableMessage } from '../translations.js'
 
 export type PartEndState = unknown
 
+/**
+ * Properties of a PartInstance that can be modified at runtime by blueprints.
+ * These are runtime state properties, distinct from the planned Part properties.
+ */
+export interface IBlueprintMutatablePartInstance {
+	/**
+	 * If set, this PartInstance exists and is valid as being next, but it cannot be taken in its current state.
+	 * This can be used to block taking a PartInstance that requires user action to resolve.
+	 * This is a runtime validation issue, distinct from the planned `invalidReason` on the Part itself.
+ */ + invalidReason?: ITranslatableMessage +} + /** The Part instance sent from Core */ -export interface IBlueprintPartInstance { +export interface IBlueprintPartInstance + extends IBlueprintMutatablePartInstance { _id: string /** The segment ("Title") this line belongs to */ segmentId: string diff --git a/packages/blueprints-integration/src/ingest.ts b/packages/blueprints-integration/src/ingest.ts index 5a96109936..dd97b3c361 100644 --- a/packages/blueprints-integration/src/ingest.ts +++ b/packages/blueprints-integration/src/ingest.ts @@ -130,6 +130,7 @@ export enum DefaultUserOperationsTypes { REVERT_RUNDOWN = '__sofie-revert-rundown', UPDATE_PROPS = '__sofie-update-props', IMPORT_MOS_ITEM = '__sofie-import-mos', + RETIME_PIECE = '__sofie-retime-piece', } export interface DefaultUserOperationRevertRundown { @@ -161,12 +162,24 @@ export type DefaultUserOperationImportMOSItem = { payload: any } +export type DefaultUserOperationRetimePiece = { + id: DefaultUserOperationsTypes.RETIME_PIECE + payload: { + segmentExternalId: string + partExternalId: string + + inPoint: number + // note - at some point this could also include an updated duration + } +} + export type DefaultUserOperations = | DefaultUserOperationRevertRundown | DefaultUserOperationRevertSegment | DefaultUserOperationRevertPart | DefaultUserOperationEditProperties | DefaultUserOperationImportMOSItem + | DefaultUserOperationRetimePiece export interface UserOperationChange { /** Indicate that this change is from user operations */ diff --git a/packages/blueprints-integration/src/triggers.ts b/packages/blueprints-integration/src/triggers.ts index 22691a309b..bcaf6279fb 100644 --- a/packages/blueprints-integration/src/triggers.ts +++ b/packages/blueprints-integration/src/triggers.ts @@ -272,6 +272,12 @@ export interface IShelfAction extends ITriggeredActionBase { filterChain: IGUIContextFilterLink[] } +export interface IEditModeAction extends ITriggeredActionBase { + action: ClientActions.editMode + state: true | false | 'toggle' + filterChain: IGUIContextFilterLink[] +} + export interface IGoToOnAirLineAction extends ITriggeredActionBase { action: ClientActions.goToOnAirLine filterChain: IGUIContextFilterLink[] @@ -325,6 +331,7 @@ export type SomeAction = | IRundownPlaylistResetAction | IRundownPlaylistResyncAction | IShelfAction + | IEditModeAction | IGoToOnAirLineAction | IRewindSegmentsAction | IShowEntireCurrentSegmentAction diff --git a/packages/blueprints-integration/src/userEditing.ts b/packages/blueprints-integration/src/userEditing.ts index 4bed37db8d..0d30165950 100644 --- a/packages/blueprints-integration/src/userEditing.ts +++ b/packages/blueprints-integration/src/userEditing.ts @@ -57,6 +57,11 @@ export interface UserEditingDefinitionSofieDefault { type: UserEditingType.SOFIE /** Id of this operation */ id: DefaultUserOperationsTypes + /** + * If true, the operation is limited to the current part. 
+ * Only applicable for RETIME_PIECE + */ + limitToCurrentPart?: boolean } export enum UserEditingType { diff --git a/packages/blueprints-integration/tsconfig.build.json b/packages/blueprints-integration/tsconfig.build.json index a7a1af8eac..7d4b98c2b9 100755 --- a/packages/blueprints-integration/tsconfig.build.json +++ b/packages/blueprints-integration/tsconfig.build.json @@ -1,15 +1,22 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], "@sofie-automation/blueprints-integration": ["./src/index.ts"] }, "resolveJsonModule": true, - "types": ["node"] - } + "types": ["node"], + "composite": true + }, + "references": [ + { + "path": "../shared-lib" + } + ] } diff --git a/packages/corelib/package.json b/packages/corelib/package.json index 1bc936157f..850c40fea0 100644 --- a/packages/corelib/package.json +++ b/packages/corelib/package.json @@ -16,8 +16,6 @@ }, "homepage": "https://github.com/Sofie-Automation/sofie-core/blob/main/packages/corelib#readme", "scripts": { - "build": "run -T rimraf dist && run build:main", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/corelib/src/__tests__/hash.spec.ts b/packages/corelib/src/__tests__/hash.spec.ts new file mode 100644 index 0000000000..ea0ff95eee --- /dev/null +++ b/packages/corelib/src/__tests__/hash.spec.ts @@ -0,0 +1,227 @@ +import { hashObj } from '../hash.js' + +describe('hashObj', () => { + describe('primitive types', () => { + test('string values', () => { + expect(hashObj('hello')).toBe(hashObj('hello')) + expect(hashObj('hello')).not.toBe(hashObj('world')) + }) + + test('number values', () => { + expect(hashObj(123)).toBe(hashObj(123)) + expect(hashObj(123)).not.toBe(hashObj(456)) + expect(hashObj(0)).toBe(hashObj(0)) + }) + + test('boolean values', () => { + expect(hashObj(true)).toBe(hashObj(true)) + expect(hashObj(false)).toBe(hashObj(false)) + expect(hashObj(true)).not.toBe(hashObj(false)) + }) + + test('undefined should produce consistent hash', () => { + expect(hashObj(undefined)).toBe(hashObj(undefined)) + }) + + test('null should produce consistent hash', () => { + const hash1 = hashObj(null) + const hash2 = hashObj(null) + expect(hash1).toBe(hash2) + }) + + test('null and undefined should produce different hashes', () => { + expect(hashObj(null)).not.toBe(hashObj(undefined)) + }) + }) + + describe('object stability', () => { + test('same properties in different order should produce same hash', () => { + const obj1 = { a: 1, b: 2, c: 3 } + const obj2 = { c: 3, a: 1, b: 2 } + const obj3 = { b: 2, c: 3, a: 1 } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + expect(hashObj(obj1)).toBe(hashObj(obj3)) + expect(hashObj(obj2)).toBe(hashObj(obj3)) + }) + + test('different property values should produce different hashes', () => { + const obj1 = { a: 1, b: 2 } + const obj2 = { a: 1, b: 3 } + + expect(hashObj(obj1)).not.toBe(hashObj(obj2)) + }) + + test('different properties should produce different hashes', () => { + const obj1 = { a: 1, b: 2 } + const obj2 = { a: 1, c: 2 } + + expect(hashObj(obj1)).not.toBe(hashObj(obj2)) + }) + }) + + describe('nested objects', () => { + test('nested objects 
with same structure should produce same hash', () => { + const obj1 = { a: 1, b: { c: 2, d: 3 } } + const obj2 = { b: { d: 3, c: 2 }, a: 1 } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('deeply nested objects should be stable', () => { + const obj1 = { + level1: { + level2: { + level3: { + value: 'deep', + }, + }, + }, + } + const obj2 = { + level1: { + level2: { + level3: { + value: 'deep', + }, + }, + }, + } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with null values should work', () => { + const obj1 = { a: 1, b: null } + const obj2 = { b: null, a: 1 } + + expect(() => hashObj(obj1)).not.toThrow() + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with undefined values should work', () => { + const obj1 = { a: 1, b: undefined } + const obj2 = { b: undefined, a: 1 } + + expect(() => hashObj(obj1)).not.toThrow() + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + }) + + describe('arrays', () => { + test('arrays should produce consistent hashes', () => { + const arr1 = [1, 2, 3] + const arr2 = [1, 2, 3] + + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + + test('arrays with different order should produce different hashes', () => { + const arr1 = [1, 2, 3] + const arr2 = [3, 2, 1] + + // Arrays maintain order, so different order = different hash + expect(hashObj(arr1)).not.toBe(hashObj(arr2)) + }) + + test('empty arrays should produce consistent hash', () => { + expect(hashObj([])).toBe(hashObj([])) + }) + + test('nested arrays should work', () => { + const arr1 = [1, [2, 3], 4] + const arr2 = [1, [2, 3], 4] + + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + + test('arrays with null should work', () => { + const arr1 = [1, null, 3] + const arr2 = [1, null, 3] + + expect(() => hashObj(arr1)).not.toThrow() + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + }) + + describe('edge cases', () => { + test('empty object should produce consistent hash', () => { + expect(hashObj({})).toBe(hashObj({})) + }) + + test('object with empty string key should work', () => { + const obj = { '': 'value' } + expect(() => hashObj(obj)).not.toThrow() + expect(hashObj(obj)).toBe(hashObj({ '': 'value' })) + }) + + test('object with numeric string keys should be stable', () => { + const obj1 = { '1': 'a', '2': 'b' } + const obj2 = { '2': 'b', '1': 'a' } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with mixed types should work', () => { + const obj = { + string: 'value', + number: 42, + boolean: true, + null: null, + undefined: undefined, + nested: { a: 1 }, + array: [1, 2, 3], + } + + expect(() => hashObj(obj)).not.toThrow() + expect(hashObj(obj)).toBe(hashObj(obj)) + }) + }) + + describe('consistency with simple values', () => { + test('string should be consistent', () => { + const str = 'test' + const hash1 = hashObj(str) + const hash2 = hashObj(str) + expect(hash1).toBe(hash2) + }) + + test('number zero should be different from empty string', () => { + expect(hashObj(0)).not.toBe(hashObj('')) + }) + + test('false should be different from 0', () => { + expect(hashObj(false)).not.toBe(hashObj(0)) + }) + }) + + describe('undefined property equivalence', () => { + test('object with undefined property should equal empty object', () => { + const obj1 = { a: undefined } + const obj2 = {} + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('multiple undefined properties should equal empty object', () => { + const obj1 = { a: undefined, b: undefined } + const obj2 = {} + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + 
test('mixed undefined and defined properties', () => {
+			const obj1 = { a: 1, b: undefined, c: 2 }
+			const obj2 = { a: 1, c: 2 }
+
+			expect(hashObj(obj1)).toBe(hashObj(obj2))
+		})
+
+		test('nested objects with undefined properties', () => {
+			const obj1 = { a: 1, b: { c: undefined } }
+			const obj2 = { a: 1, b: {} }
+
+			expect(hashObj(obj1)).toBe(hashObj(obj2))
+		})
+	})
+})
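// Why the undefined-equivalence exercised by the tests above matters (a sketch, not part
// of the diff): documents round-trip through JSON/mongo serialization, which drops
// undefined-valued properties, so a hash used as a stable identity must treat
// { a: undefined } and {} as the same document. The import path is an assumption.
import { hashObj } from '@sofie-automation/corelib/dist/hash'

const beforeRoundTrip = { path: 'clip.mxf', bitrate: undefined }
const afterRoundTrip = JSON.parse(JSON.stringify(beforeRoundTrip)) // becomes { path: 'clip.mxf' }
console.log(hashObj(beforeRoundTrip) === hashObj(afterRoundTrip)) // true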
diff --git a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts
index 68f682ebdf..8411fb5791 100644
--- a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts
+++ b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts
@@ -1,6 +1,5 @@
 import { ExpectedPackageStatusAPI, Time } from '@sofie-automation/blueprints-integration'
-import { ExpectedPackageDBBase } from './ExpectedPackages.js'
-import { ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js'
+import { ExpectedPackageId, ExpectedPackageWorkStatusId, PeripheralDeviceId, StudioId } from './Ids.js'
 
 /**
  * ExpectedPackageWorkStatus contains statuses about Work that is being performed on expected packages
@@ -10,7 +9,7 @@ import { ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js'
 export interface ExpectedPackageWorkStatus extends Omit<ExpectedPackageStatusAPI.WorkStatus, 'fromPackages'> {
 	_id: ExpectedPackageWorkStatusId
-	studioId: ExpectedPackageDBBase['studioId']
+	studioId: StudioId
 
 	fromPackages: ExpectedPackageWorkStatusFromPackage[]
 
 	/** Which PeripheralDevice this update came from */
@@ -20,5 +19,5 @@ export interface ExpectedPackageWorkStatus extends Omit<ExpectedPackageStatusAPI.WorkStatus, 'fromPackages'> {
 export interface ExpectedPackageWorkStatusFromPackage
 	extends Omit<ExpectedPackageStatusAPI.WorkBaseInfoFromPackage, 'id'> {
-	id: ExpectedPackageDBBase['_id']
+	id: ExpectedPackageId
 }
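// Sketch of the consequence of the ExpectedPackages rework that follows (not part of the
// diff): the document _id is now derived from a hash of the package content plus its
// parent, so the same content referenced from several pieces collapses into a single
// ExpectedPackageDB document listing multiple ingestSources. The ids here are
// hypothetical placeholders.
import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

declare const rundownId: RundownId // hypothetical parent
declare const pkg: ExpectedPackage.Any // hypothetical package payload

// Same parent + same package content => same stable id, no matter which piece queued it:
const idFromPieceA = getExpectedPackageId(rundownId, pkg)
const idFromPieceB = getExpectedPackageId(rundownId, pkg)
console.log(String(idFromPieceA) === String(idFromPieceB)) // true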
diff --git a/packages/corelib/src/dataModel/ExpectedPackages.ts b/packages/corelib/src/dataModel/ExpectedPackages.ts
index 2e91000143..e2d49c2c26 100644
--- a/packages/corelib/src/dataModel/ExpectedPackages.ts
+++ b/packages/corelib/src/dataModel/ExpectedPackages.ts
@@ -18,7 +18,7 @@ import {
 import { ReadonlyDeep } from 'type-fest'
 
 /*
-	Expected Packages are created from Pieces in the rundown.
+	Expected Packages are created from Pieces and other content in the rundown.
 	A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc..
 	The blueprints generate Pieces with expectedPackages on them.
 	These are then picked up by a Package Manager who then tries to fulfill the expectations.
@@ -26,22 +26,6 @@ import { ReadonlyDeep } from 'type-fest'
 	The Package Manager will then copy the file to the right place.
 */
 
-export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction
-
-export type ExpectedPackageFromRundownBaseline =
-	| ExpectedPackageDBFromBaselineAdLibAction
-	| ExpectedPackageDBFromBaselineAdLibPiece
-	| ExpectedPackageDBFromRundownBaselineObjects
-	| ExpectedPackageDBFromBaselinePiece
-
-export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction
-
-export type ExpectedPackageDB =
-	| ExpectedPackageFromRundown
-	| ExpectedPackageDBFromBucket
-	| ExpectedPackageFromRundownBaseline
-	| ExpectedPackageDBFromStudioBaselineObjects
-
 export enum ExpectedPackageDBType {
 	PIECE = 'piece',
 	ADLIB_PIECE = 'adlib_piece',
@@ -54,23 +38,60 @@ export enum ExpectedPackageDBType {
 	RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects',
 	STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects',
 }
-export interface ExpectedPackageDBBase extends Omit<ExpectedPackage.Any, '_id'> {
-	_id: ExpectedPackageId
-	/** The local package id - as given by the blueprints */
-	blueprintPackageId: string
+
+export interface ExpectedPackageDB {
+	_id: ExpectedPackageId // derived from rundownId and hash of `package`
 
 	/** The studio of the Rundown of the Piece this package belongs to */
 	studioId: StudioId
 
-	/** Hash that changes whenever the content or version changes. See getContentVersionHash() */
-	contentVersionHash: string
-
-	// pieceId: ProtectedString<any> | null
-	fromPieceType: ExpectedPackageDBType
+	/** The rundown this package belongs to, if any. Must not be set when bucketId is set */
+	rundownId: RundownId | null
+	/** The bucket this package belongs to, if any. Must not be set when rundownId is set */
+	bucketId: BucketId | null
 
 	created: Time
+
+	package: ReadonlyDeep<Omit<ExpectedPackage.Any, '_id'>>
+
+	/**
+	 * The ingest sources that generated this package.
+	 */
+	ingestSources: ExpectedPackageIngestSource[]
+
+	playoutSources: {
+		/**
+		 * Any playout PieceInstance. This can be any non-reset pieceInstance in the rundown.
+		 * Due to the update flow, this can contain some stale data for a few seconds after a playout operation.
+		 */
+		pieceInstanceIds: PieceInstanceId[]
+	}
+}
+
+export interface ExpectedPackageIngestSourceBase {
+	/** The id of the package as known by the blueprints */
+	blueprintPackageId: string
+
+	/** Whether the blueprints are listening for updates to packageInfos for this package */
+	listenToPackageInfoUpdates: boolean | undefined
+}
+
+export interface ExpectedPackageIngestSourceBucketAdlibPiece extends ExpectedPackageIngestSourceBase {
+	fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB
+	/** The Bucket adlib this package belongs to */
+	pieceId: BucketAdLibId
+	/** The `externalId` of the Bucket adlib this package belongs to */
+	pieceExternalId: string
+}
+export interface ExpectedPackageIngestSourceBucketAdlibAction extends ExpectedPackageIngestSourceBase {
+	fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION
+	/** The Bucket adlib-action this package belongs to */
+	pieceId: BucketAdLibActionId
+	/** The `externalId` of the Bucket adlib-action this package belongs to */
+	pieceExternalId: string
 }
-export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase {
+
+export interface ExpectedPackageIngestSourcePiece extends ExpectedPackageIngestSourceBase {
 	fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE
 	/** The Piece this package belongs to */
 	pieceId: PieceId
@@ -78,93 +99,82 @@ export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase {
 	partId: PartId
 	/** The Segment this package belongs to */
 	segmentId: SegmentId
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
 }
-export interface ExpectedPackageDBFromBaselinePiece extends ExpectedPackageDBBase {
+export interface ExpectedPackageIngestSourceAdlibAction extends ExpectedPackageIngestSourceBase {
+	fromPieceType: ExpectedPackageDBType.ADLIB_ACTION
+	/** The Piece this package belongs to */
+	pieceId: AdLibActionId
+	/** The Part this package belongs to */
+	partId: PartId
+	/** The Segment this package belongs to */
+	segmentId: SegmentId
+}
+export interface ExpectedPackageIngestSourceBaselinePiece extends ExpectedPackageIngestSourceBase {
 	fromPieceType: ExpectedPackageDBType.BASELINE_PIECE
 	/** The Piece this package belongs to */
 	pieceId: PieceId
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
 }
-
-export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase {
+export interface ExpectedPackageIngestSourceBaselineAdlibPiece extends ExpectedPackageIngestSourceBase {
 	fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE
 	/** The Piece this package belongs to */
 	pieceId: PieceId
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
-}
-
-export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase {
-	fromPieceType: ExpectedPackageDBType.ADLIB_ACTION
-	/** The Adlib Action this package belongs to */
-	pieceId: AdLibActionId
-	/** The Part this package belongs to */
-	partId: PartId
-	/** The Segment this package belongs to */
-	segmentId: SegmentId
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
 }
-export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase {
+export interface ExpectedPackageIngestSourceBaselineAdlibAction extends ExpectedPackageIngestSourceBase {
 	fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION
 	/** The Piece this package belongs to */
 	pieceId: RundownBaselineAdLibActionId
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
 }
-
-export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase {
+export interface ExpectedPackageIngestSourceBaselineObjects extends ExpectedPackageIngestSourceBase {
 	fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
-	/** The rundown of the Piece this package belongs to */
-	rundownId: RundownId
-	pieceId: null
 }
-export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase {
+
+export interface ExpectedPackageIngestSourceStudioBaseline extends ExpectedPackageIngestSourceBase {
+	// Future: Technically this is a playout source, but for now it needs to be treated as an ingest source
 	fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS
-	pieceId: null
 }
 
-export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase {
-	fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB
-	bucketId: BucketId
-	/** The Bucket adlib this package belongs to */
-	pieceId: BucketAdLibId
-	/** The `externalId` of the Bucket adlib this package belongs to */
-	pieceExternalId: string
-}
-export interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase {
-	fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION
-	bucketId: BucketId
-	/** The Bucket adlib-action this package belongs to */
-	pieceId: BucketAdLibActionId
-	/** The `externalId` of the Bucket adlib-action this package belongs to */
-	pieceExternalId: string
-}
+export type ExpectedPackageIngestSourcePart = ExpectedPackageIngestSourcePiece | ExpectedPackageIngestSourceAdlibAction
 
-export function getContentVersionHash(expectedPackage: ReadonlyDeep<ExpectedPackage.Any>): string {
-	return hashObj({
-		content: expectedPackage.content,
-		version: expectedPackage.version,
-		// todo: should expectedPackage.sources.containerId be here as well?
-	})
-}
+export type ExpectedPackageIngestSourceBucket =
+	| ExpectedPackageIngestSourceBucketAdlibPiece
+	| ExpectedPackageIngestSourceBucketAdlibAction
+export type ExpectedPackageIngestSourceRundownBaseline =
+	| ExpectedPackageIngestSourceBaselinePiece
+	| ExpectedPackageIngestSourceBaselineAdlibPiece
+	| ExpectedPackageIngestSourceBaselineAdlibAction
+	| ExpectedPackageIngestSourceBaselineObjects
+
+export type ExpectedPackageIngestSource =
+	| ExpectedPackageIngestSourcePart
+	| ExpectedPackageIngestSourceRundownBaseline
+	| ExpectedPackageIngestSourceBucket
+	| ExpectedPackageIngestSourceStudioBaseline
+
+/**
+ * Generate the expectedPackageId for the given expectedPackage.
+ * This is a stable id derived from the package and its parent. This document is expected to be owned by multiple sources.
+ */
 export function getExpectedPackageId(
-	/** _id of the owner (the piece, adlib etc..)
*/ - ownerId: - | PieceId - | PieceInstanceId - | AdLibActionId - | RundownBaselineAdLibActionId - | BucketAdLibId - | BucketAdLibActionId - | RundownId - | StudioId, + /** Preferably a RundownId or BucketId, but StudioId is allowed when not owned by a rundown or bucket */ + parentId: RundownId | StudioId | BucketId, /** The locally unique id of the expectedPackage */ - localExpectedPackageId: ExpectedPackage.Base['_id'] + expectedPackage: ReadonlyDeep> ): ExpectedPackageId { - return protectString(`${ownerId}_${getHash(localExpectedPackageId)}`) + // This may be too agressive, but we don't know how to merge some of the properties + const objHash = hashObj({ + ...expectedPackage, + _id: '', // Ignore the _id, this is not guaranteed to be stable + listenToPackageInfoUpdates: false, // Not relevant for the hash + } satisfies ReadonlyDeep) + + return protectString(`${parentId}_${getHash(objHash)}`) +} + +/** + * Returns true if the expected package is referenced by any playout PieceInstances + * @returns boolean + */ +export function isPackageReferencedByPlayout(expectedPackage: Pick): boolean { + return expectedPackage.playoutSources.pieceInstanceIds.length > 0 } diff --git a/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts new file mode 100644 index 0000000000..7e74aadc4b --- /dev/null +++ b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts @@ -0,0 +1,137 @@ +import type { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration' +import type { + AdLibActionId, + BucketAdLibActionId, + BucketAdLibId, + BucketId, + ExpectedPackageId, + PartId, + PieceId, + RundownBaselineAdLibActionId, + RundownId, + SegmentId, + StudioId, +} from '../Ids.js' + +/** + * Warning: This is a snapshot of the ExpectedPackage interface from before the rework in R53. + * This should not be modified and should only be used in code performing fixup operations. + */ + +/* + Expected Packages are created from Pieces in the rundown. + A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc.. + The blueprints generate Pieces with expectedPackages on them. + These are then picked up by a Package Manager who then tries to fullfill the expectations. + Example: An ExpectedPackage could be a "Media file to be present on the location used by a playout device". + The Package Manager will then copy the file to the right place. 
+*/ + +export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction + +export type ExpectedPackageFromRundownBaseline = + | ExpectedPackageDBFromBaselineAdLibAction + | ExpectedPackageDBFromBaselineAdLibPiece + | ExpectedPackageDBFromRundownBaselineObjects + +export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction + +export type ExpectedPackageDB = + | ExpectedPackageFromRundown + | ExpectedPackageDBFromBucket + | ExpectedPackageFromRundownBaseline + | ExpectedPackageDBFromStudioBaselineObjects + +export enum ExpectedPackageDBType { + PIECE = 'piece', + ADLIB_PIECE = 'adlib_piece', + ADLIB_ACTION = 'adlib_action', + BASELINE_ADLIB_PIECE = 'baseline_adlib_piece', + BASELINE_ADLIB_ACTION = 'baseline_adlib_action', + BUCKET_ADLIB = 'bucket_adlib', + BUCKET_ADLIB_ACTION = 'bucket_adlib_action', + RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', + STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects', +} +export interface ExpectedPackageDBBase extends Omit { + _id: ExpectedPackageId + /** The local package id - as given by the blueprints */ + blueprintPackageId: string + + /** The studio of the Rundown of the Piece this package belongs to */ + studioId: StudioId + + /** Hash that changes whenever the content or version changes. See getContentVersionHash() */ + contentVersionHash: string + + // pieceId: ProtectedString | null + fromPieceType: ExpectedPackageDBType + + created: Time +} +export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.ADLIB_ACTION + /** The Adlib Action this package belongs to */ + pieceId: AdLibActionId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} +export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION + /** The Piece this package belongs to */ + pieceId: RundownBaselineAdLibActionId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId + pieceId: null +} +export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS + pieceId: null +} + +export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB + 
bucketId: BucketId + /** The Bucket adlib this package belongs to */ + pieceId: BucketAdLibId + /** The `externalId` of the Bucket adlib this package belongs to */ + pieceExternalId: string +} +export interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION + bucketId: BucketId + /** The Bucket adlib-action this package belongs to */ + pieceId: BucketAdLibActionId + /** The `externalId` of the Bucket adlib-action this package belongs to */ + pieceExternalId: string +} diff --git a/packages/corelib/src/dataModel/PackageInfos.ts b/packages/corelib/src/dataModel/PackageInfos.ts index 879875be8a..4305aacd26 100644 --- a/packages/corelib/src/dataModel/PackageInfos.ts +++ b/packages/corelib/src/dataModel/PackageInfos.ts @@ -1,6 +1,5 @@ import { PackageInfo, Time } from '@sofie-automation/blueprints-integration' import { protectString } from '../protectedString.js' -import { ExpectedPackageDB } from './ExpectedPackages.js' import { ExpectedPackageId, PackageInfoId, PeripheralDeviceId, StudioId } from './Ids.js' /** @@ -14,7 +13,7 @@ export interface PackageInfoDB extends PackageInfo.Base { /** Reference to the Package this document has info about */ packageId: ExpectedPackageId /** Reference to the contentVersionHash of the ExpectedPackage, used to reference the expected content+version of the Package */ - expectedContentVersionHash: ExpectedPackageDB['contentVersionHash'] + expectedContentVersionHash: string /** Referring to the actual contentVersionHash of the Package, used to reference the exact content+version of the Package */ actualContentVersionHash: string diff --git a/packages/corelib/src/dataModel/PartInstance.ts b/packages/corelib/src/dataModel/PartInstance.ts index 8c40e66e2f..b4b6baaa2a 100644 --- a/packages/corelib/src/dataModel/PartInstance.ts +++ b/packages/corelib/src/dataModel/PartInstance.ts @@ -1,7 +1,7 @@ import { PartEndState, Time } from '@sofie-automation/blueprints-integration' import { PartCalculatedTimings } from '../playout/timings.js' import { PartInstanceId, RundownId, RundownPlaylistActivationId, SegmentId, SegmentPlayoutId } from './Ids.js' -import { DBPart } from './Part.js' +import { DBPart, PartInvalidReason } from './Part.js' export interface DBPartInstance { _id: PartInstanceId @@ -40,6 +40,13 @@ export interface DBPartInstance { /** If taking out of the current part is blocked, this is the time it is blocked until */ blockTakeUntil?: number + + /** + * If set, this PartInstance exists and is valid as being next, but it cannot be taken in its current state. + * This can be used to block taking a PartInstance that requires user action to resolve. + * This is a runtime validation issue, distinct from the planned `invalidReason` on the Part itself. 
+	 */
+	invalidReason?: PartInvalidReason
 }
 
 export interface PartInstanceTimings {
diff --git a/packages/corelib/src/dataModel/PieceInstance.ts b/packages/corelib/src/dataModel/PieceInstance.ts
index 7c69166887..1847a55969 100644
--- a/packages/corelib/src/dataModel/PieceInstance.ts
+++ b/packages/corelib/src/dataModel/PieceInstance.ts
@@ -7,6 +7,7 @@ import {
 	RundownId,
 	PartInstanceId,
 	PieceId,
+	ExpectedPackageId,
 } from './Ids.js'
 import { Piece } from './Piece.js'
 import { omit } from '../lib.js'
@@ -74,6 +75,13 @@ export interface PieceInstance {
 	reportedStoppedPlayback?: Time
 	plannedStartedPlayback?: Time
 	plannedStoppedPlayback?: Time
+
+	/**
+	 * The IDs of ExpectedPackages that are needed for this PieceInstance
+	 * This matches the data on `this.piece.expectedPackages`, resolved to the full database IDs
+	 * Future: This should replace the expectedPackages on Piece entirely
+	 */
+	neededExpectedPackageIds?: ExpectedPackageId[]
 }
 
 export interface ResolvedPieceInstance {
diff --git a/packages/corelib/src/dataModel/UserEditingDefinitions.ts b/packages/corelib/src/dataModel/UserEditingDefinitions.ts
index c9ddfcf0dd..fe0911e986 100644
--- a/packages/corelib/src/dataModel/UserEditingDefinitions.ts
+++ b/packages/corelib/src/dataModel/UserEditingDefinitions.ts
@@ -96,4 +96,9 @@ export interface CoreUserEditingDefinitionSofie {
 	type: UserEditingType.SOFIE
 	/** Id of this operation */
 	id: DefaultUserOperationsTypes
+	/**
+	 * If true, the operation is limited to the current part.
+	 * Only applicable for RETIME_PIECE
+	 */
+	limitToCurrentPart?: boolean
 }
diff --git a/packages/corelib/src/error.ts b/packages/corelib/src/error.ts
index 3cbf71c558..38338de605 100644
--- a/packages/corelib/src/error.ts
+++ b/packages/corelib/src/error.ts
@@ -64,6 +64,7 @@ export enum UserErrorMessage {
 	IdempotencyKeyAlreadyUsed = 48,
 	RateLimitExceeded = 49,
 	SystemSingleStudio = 50,
+	TakePartInstanceInvalid = 51,
 }
 
 const UserErrorMessagesTranslations: { [key in UserErrorMessage]: string } = {
@@ -126,6 +127,7 @@ const UserErrorMessagesTranslations: { [key in UserErrorMessage]: string } = {
 	[UserErrorMessage.IdempotencyKeyAlreadyUsed]: t(`Idempotency-Key is already used`),
 	[UserErrorMessage.RateLimitExceeded]: t(`Rate limit exceeded`),
 	[UserErrorMessage.SystemSingleStudio]: t(`System must have exactly one studio`),
+	[UserErrorMessage.TakePartInstanceInvalid]: t(`Part has issues and cannot be taken`),
 }
 
 export interface SerializedUserError {
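// Sketch, not part of the diff: how the new invalidReason field and the
// TakePartInstanceInvalid error are expected to meet in the take logic. The guard
// function and the exact UserError factory used here are assumptions.
import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/error'
import type { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'

function assertPartInstanceIsTakeable(nextPartInstance: DBPartInstance): void {
	if (nextPartInstance.invalidReason) {
		// Surface "Part has issues and cannot be taken" to the user instead of taking
		throw UserError.create(UserErrorMessage.TakePartInstanceInvalid)
	}
}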
diff --git a/packages/corelib/src/hash.ts b/packages/corelib/src/hash.ts
index 3b38556f37..beb6ab2089 100644
--- a/packages/corelib/src/hash.ts
+++ b/packages/corelib/src/hash.ts
@@ -8,7 +8,15 @@ export function getHash(str: string): string {
 /** Creates a hash based on the object properties (excluding ordering of properties) */
 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 export function hashObj(obj: any): string {
-	if (typeof obj === 'object') {
+	if (obj === null) return 'null'
+	if (obj === undefined) return 'undefined'
+
+	if (Array.isArray(obj)) {
+		// For arrays, we care about the order, and should preserve undefined
+		const strs = obj.map((val, i) => `${i}:${hashObj(val)}`)
+
+		return getHash(strs.join('|'))
+	} else if (typeof obj === 'object') {
 		const keys = Object.keys(obj).sort((a, b) => {
 			if (a > b) return 1
 			if (a < b) return -1
@@ -17,7 +25,11 @@ export function hashObj(obj: any): string {
 		const strs: string[] = []
 		for (const key of keys) {
-			strs.push(hashObj(obj[key]))
+			const val = obj[key]
+			// Skip undefined values to make {a: undefined} hash the same as {}, matching how JSON/mongo serialization will behave
+			if (val !== undefined) {
+				strs.push(`${key}:${hashObj(val)}`)
+			}
 		}
 		return getHash(strs.join('|'))
 	}
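// Quick summary of the hashObj semantics after the change above (sketch, not part of
// the diff): array order is significant because element indices are folded into the
// hash, while object key order is not because keys are sorted before hashing.
import { hashObj } from '@sofie-automation/corelib/dist/hash'

console.log(hashObj([1, 2]) === hashObj([2, 1])) // false: array order matters
console.log(hashObj({ a: 1, b: 2 }) === hashObj({ b: 2, a: 1 })) // true: key order does not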
diff --git a/packages/corelib/src/mongo.ts b/packages/corelib/src/mongo.ts
index 55025f013e..aebfa7fb97 100644
--- a/packages/corelib/src/mongo.ts
+++ b/packages/corelib/src/mongo.ts
@@ -119,7 +119,14 @@ export function mongoWhere(o: Record<string, any>, selector: MongoQuery<any>): boolean {
 		const oAttr = o[key]
 
 		if (_.isObject(s)) {
-			if (_.has(s, '$gt')) {
+			if (_.has(s, '$elemMatch')) {
+				// Handle $elemMatch for array fields
+				if (Array.isArray(oAttr)) {
+					ok = oAttr.some((item) => mongoWhere(item, s.$elemMatch))
+				} else {
+					ok = false
+				}
+			} else if (_.has(s, '$gt')) {
 				ok = oAttr > s.$gt
 			} else if (_.has(s, '$gte')) {
 				ok = oAttr >= s.$gte
@@ -222,7 +229,7 @@ export function mongoFindOptions<TDoc extends { _id: ProtectedString<any> }>(
 			const newDoc: any = {} // any since includeKeys breaks strict typings anyway
 
 			for (const key of includeKeys) {
-				objectPath.set(newDoc, key, objectPath.get(doc, key))
+				projectFieldIntoDoc(doc, newDoc, key)
 			}
 
 			return newDoc
@@ -246,6 +253,69 @@ export function mongoFindOptions<TDoc extends { _id: ProtectedString<any> }>(
 	return docs
 }
 
+/**
+ * Project a field from a source document into a target document.
+ * Handles nested paths through arrays like MongoDB does.
+ * e.g., 'items.name' on {items: [{name: 'a', value: 1}]} => {items: [{name: 'a'}]}
+ */
+function projectFieldIntoDoc(source: any, target: any, path: string): void {
+	const parts = path.split('.')
+	let currentSource = source
+	let currentTarget = target
+
+	for (let i = 0; i < parts.length; i++) {
+		const part = parts[i]
+		const isLast = i === parts.length - 1
+		const remainingPath = parts.slice(i + 1).join('.')
+
+		if (currentSource === undefined || currentSource === null) {
+			return
+		}
+
+		if (Array.isArray(currentSource)) {
+			// Handle array - project the field from each element
+			if (!Array.isArray(currentTarget)) {
+				// Initialize as empty array if not already an array
+				const parentPath = parts.slice(0, i).join('.')
+				if (parentPath) {
+					objectPath.set(target, parentPath, [])
+					currentTarget = objectPath.get(target, parentPath)
+				} else {
+					return // Can't set root to array
+				}
+			}
+
+			// Project the remaining path into each array element
+			for (let j = 0; j < currentSource.length; j++) {
+				if (currentTarget[j] === undefined) {
+					currentTarget[j] = {}
+				}
+				const subPath = isLast ? part : [part, remainingPath].join('.')
+				projectFieldIntoDoc(currentSource[j], currentTarget[j], subPath)
+			}
+			return
+		}
+
+		if (isLast) {
+			// We've reached the final part of the path
+			if (currentSource[part] !== undefined) {
+				currentTarget[part] = currentSource[part]
+			}
+		} else {
+			// Navigate deeper
+			if (currentTarget[part] === undefined) {
+				if (Array.isArray(currentSource[part])) {
+					currentTarget[part] = []
+				} else {
+					currentTarget[part] = {}
+				}
+			}
+			currentSource = currentSource[part]
+			currentTarget = currentTarget[part]
+		}
+	}
+}
+
 export function mongoModify<TDoc extends { _id: ProtectedString<any> }>(
 	selector: MongoQuery<TDoc>,
 	doc: TDoc,
@@ -411,17 +481,29 @@ export function pushOntoPath<T>(obj: Record<string, unknown>, path: string, valueToPush: T): void {
 * Pull a value from an object, when the value matches
 * @param obj Object
 * @param path Path to array in object
- * @param valueToPush Value to push onto array
+ * @param matchValue Value to match for removal. Supports $in operator for matching multiple values.
 */
 export function pullFromPath<T>(obj: Record<string, unknown>, path: string, matchValue: T): void {
 	const mutator = (o: Record<string, unknown>, lastAttr: string) => {
 		if (_.has(o, lastAttr)) {
-			if (!_.isArray(o[lastAttr]))
+			const arrAttr = o[lastAttr]
+			if (!arrAttr || !Array.isArray(arrAttr))
 				throw new Error(
-					'Object property "' + lastAttr + '" is not an array ("' + o[lastAttr] + '") (in path "' + path + '")'
+					'Object property "' + lastAttr + '" is not an array ("' + arrAttr + '") (in path "' + path + '")'
 				)
 
-			return (o[lastAttr] = _.filter(o[lastAttr] as any, (entry: T) => !_.isMatch(entry, matchValue)))
+			// Handle $in operator for matching multiple values
+			if (
+				matchValue &&
+				typeof matchValue === 'object' &&
+				'$in' in matchValue &&
+				Array.isArray((matchValue as Record<string, unknown>).$in)
+			) {
+				const inValues = (matchValue as Record<string, unknown>).$in as unknown[]
+				return (o[lastAttr] = arrAttr.filter((entry: T) => !inValues.includes(entry)))
+			}
+
+			return (o[lastAttr] = arrAttr.filter((entry: T) => !_.isMatch(entry, matchValue)))
 		} else {
 			return undefined
 		}
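// Sketch of the two mongo-helper additions above, with illustrative data (not part of
// the diff):
import { mongoWhere, pullFromPath } from '@sofie-automation/corelib/dist/mongo'

// $elemMatch now matches when any element of an array field satisfies the sub-query:
const doc = { pieces: [{ status: 'ok' }, { status: 'missing' }] }
console.log(mongoWhere(doc, { pieces: { $elemMatch: { status: 'missing' } } })) // true

// pullFromPath accepts an $in matcher to remove several primitive values at once:
const obj = { ids: ['a', 'b', 'c'] }
pullFromPath(obj, 'ids', { $in: ['a', 'c'] })
console.log(obj.ids) // ['b']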
diff --git a/packages/corelib/src/pubsub.ts b/packages/corelib/src/pubsub.ts
index 6a6e7783c4..e14e47894a 100644
--- a/packages/corelib/src/pubsub.ts
+++ b/packages/corelib/src/pubsub.ts
@@ -18,7 +18,7 @@ import { Blueprint } from './dataModel/Blueprint.js'
 import { BucketAdLibAction } from './dataModel/BucketAdLibAction.js'
 import { BucketAdLib } from './dataModel/BucketAdLibPiece.js'
 import { ExpectedPackageWorkStatus } from './dataModel/ExpectedPackageWorkStatuses.js'
-import { ExpectedPackageDBBase } from './dataModel/ExpectedPackages.js'
+import { ExpectedPackageDB } from './dataModel/ExpectedPackages.js'
 import { ExternalMessageQueueObj } from './dataModel/ExternalMessageQueue.js'
 import { PackageContainerStatusDB } from './dataModel/PackageContainerStatus.js'
 import { PeripheralDevice } from './dataModel/PeripheralDevice.js'
@@ -356,7 +356,7 @@ export type CorelibPubSubCollections = {
 	[CollectionName.Buckets]: Bucket
 	[CollectionName.BucketAdLibActions]: BucketAdLibAction
 	[CollectionName.BucketAdLibPieces]: BucketAdLib
-	[CollectionName.ExpectedPackages]: ExpectedPackageDBBase
+	[CollectionName.ExpectedPackages]: ExpectedPackageDB
 	[CollectionName.ExpectedPackageWorkStatuses]: ExpectedPackageWorkStatus
 	[CollectionName.ExternalMessageQueue]: ExternalMessageQueueObj
 	[CollectionName.Notifications]: DBNotificationObj
diff --git a/packages/corelib/src/settings/__tests__/objectWithOverrides.spec.ts b/packages/corelib/src/settings/__tests__/objectWithOverrides.spec.ts
index af46f14743..c4ec678162 100644
--- a/packages/corelib/src/settings/__tests__/objectWithOverrides.spec.ts
+++ b/packages/corelib/src/settings/__tests__/objectWithOverrides.spec.ts
@@ -1,4 +1,5 @@
 import { literal } from '../../lib.js'
+import clone from 'fast-clone'
 import {
 	applyAndValidateOverrides,
 	ObjectWithOverrides,
@@ -186,8 +187,8 @@ describe('applyAndValidateOverrides', () => {
 			},
 		},
 		overrides: [
-			{ op: 'set', path: 'valA', value: 'def' },
 			{ op: 'set', path: 'valB.valD', value: 'uvw' },
+			{ op: 'set', path: 'valA', value: 'def' },
 			{ op: 'set', path: 'valB.valC', value: 6 },
 		],
 	})
@@ -235,4 +236,189 @@ describe('applyAndValidateOverrides', () => {
 			})
 		)
 	})
+
+	test('update overrides - add to existing overrides', () => {
+		const inputObj: BasicType = {
+			valA: 'abc',
+			valB: {
+				valC: 5,
+				valD: 'foo',
+			},
+		}
+
+		const inputObjWithOverrides: ObjectWithOverrides<BasicType> = {
+			defaults: inputObj,
+			overrides: [
+				{ op: 'set', path: 'valA', value: 'def' },
+				{ op: 'set', path: 'valB.valC', value: 6 },
+			],
+		}
+
+		const updateObj: BasicType = {
+			valA: 'ghi',
+			valB: {
+				valC: 7,
+				valD: 'bar',
+			},
+		}
+
+		const res = updateOverrides(inputObjWithOverrides, updateObj)
+		expect(res).toBeTruthy()
+
+		expect(res).toStrictEqual(
+			literal<ObjectWithOverrides<BasicType>>({
+				defaults: {
+					valA: 'abc',
+					valB: {
+						valC: 5,
+						valD: 'foo',
+					},
+				},
+				overrides: [
+					{ op: 'set', path: 'valA', value: 'ghi' },
+					{ op: 'set', path: 'valB.valC', value: 7 },
+					{ op: 'set', path: 'valB.valD', value: 'bar' },
+				],
+			})
+		)
+	})
+
+	test('update overrides - add to existing overrides #2', () => {
+		const inputObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1' },
+				'1': { propA: 36, propB: 'Mic 2' },
+				'2': { propA: 37, propB: 'Mic 3' },
+			},
+		}
+
+		const inputObjWithOverrides: ObjectWithOverrides<typeof inputObj> = {
+			defaults: inputObj,
+			overrides: [
+				{
+					op: 'set',
+					path: 'valB.0.propC',
+					value: true,
+				},
+				{
+					op: 'set',
+					path: 'valB.0.propD',
+					value: true,
+				},
+				{ op: 'set', path: 'valB.1.propC', value: true },
+			],
+		}
+
+		const updateObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1', propC: true, propD: true },
+				'1': { propA: 36, propB: 'Mic 2', propC: true },
+				'2': { propA: 37, propB: 'Mic 3', propC: true },
+			},
+		}
+
+		const res = updateOverrides(inputObjWithOverrides, updateObj)
+		expect(res).toBeTruthy()
+
+		expect(res).toStrictEqual(
+			literal<ObjectWithOverrides<typeof inputObj>>({
+				defaults: clone(inputObj),
+				overrides: [
+					{
+						op: 'set',
+						path: 'valB.0.propC',
+						value: true,
+					},
+					{
+						op: 'set',
+						path: 'valB.0.propD',
+						value: true,
+					},
+					{ op: 'set', path: 'valB.1.propC', value: true },
+					{ op: 'set', path: 'valB.2.propC', value: true },
+				],
+			})
+		)
+	})
+
+	test('update overrides - delete key', () => {
+		const inputObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1' },
+				'1': { propA: 36, propB: 'Mic 2' },
+				'2': { propA: 37, propB: 'Mic 3' },
+			},
+		}
+
+		const inputObjWithOverrides: ObjectWithOverrides<typeof inputObj> = {
+			defaults: inputObj,
+			overrides: [],
+		}
+
+		const updateObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1' },
+				'1': { propA: 36, propB: 'Mic 2' },
+			},
+		}
+
+		const res = updateOverrides(inputObjWithOverrides, updateObj)
+		expect(res).toBeTruthy()
+
+		expect(res).toStrictEqual(
+			literal<ObjectWithOverrides<typeof inputObj>>({
+				defaults: clone(inputObj),
+				overrides: [
+					{
+						op: 'delete',
+						path: 'valB.2',
+					},
+				],
+			})
+		)
+	})
+
+	test('update overrides - delete value', () => {
+		const inputObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1' },
+				'1': { propA: 36, propB: 'Mic 2' },
+				'2': { propA: 37, propB: 'Mic 3' },
+			},
+		}
+
+		const inputObjWithOverrides: ObjectWithOverrides<typeof inputObj> = {
+			defaults: inputObj,
+			overrides: [],
+		}
+
+		const updateObj = {
+			valA: 'abc',
+			valB: {
+				'0': { propA: 35, propB: 'Mic 1' },
+				'1': { propA: 36, propB: 'Mic 2' },
+				'2': { propA: 37 },
+			},
+		}
+
+		const res = updateOverrides(inputObjWithOverrides, updateObj)
+		expect(res).toBeTruthy()
+
+		expect(res).toStrictEqual(
+			literal<ObjectWithOverrides<typeof inputObj>>({
+				defaults: clone(inputObj),
+				overrides: [
+					{
+						op: 'delete',
+						path: 'valB.2.propB',
+					},
+				],
+			})
+		)
+	})
 })
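// Sketch of what the reworked updateOverrides (next file) computes, using the shapes
// exercised by the tests above; values are illustrative and the import path is assumed.
import { updateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides'

const stored = {
	defaults: { valA: 'abc', valB: { valC: 5 } },
	overrides: [{ op: 'set' as const, path: 'valA', value: 'def' }],
}
// The user edited the flattened object: valA changed again, valB.valC removed entirely
const edited = { valA: 'xyz', valB: {} }

const updated = updateOverrides(stored, edited)
// updated.overrides is expected to become:
//   [{ op: 'set', path: 'valA', value: 'xyz' }, { op: 'delete', path: 'valB.valC' }]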
diff --git a/packages/corelib/src/settings/objectWithOverrides.ts b/packages/corelib/src/settings/objectWithOverrides.ts
index 32dce80b53..8103324298 100644
--- a/packages/corelib/src/settings/objectWithOverrides.ts
+++ b/packages/corelib/src/settings/objectWithOverrides.ts
@@ -2,6 +2,7 @@ import * as objectPath from 'object-path'
 import { ReadonlyDeep } from 'type-fest'
 import _ from 'underscore'
 import { assertNever, clone, literal } from '../lib.js'
+import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep'
 
 /**
  * This is an object which allows for overrides to be tracked and reapplied
@@ -88,55 +89,96 @@ export function updateOverrides<T extends object>(
 	curObj: ReadonlyDeep<ObjectWithOverrides<T>>,
 	rawObj: ReadonlyDeep<T>
 ): ObjectWithOverrides<T> {
-	const result: ObjectWithOverrides<T> = { defaults: clone(curObj.defaults), overrides: [] }
-	for (const [key, value] of Object.entries(rawObj)) {
-		const override = curObj.overrides.find((ov) => {
-			const parentPath = getParentObjectPath(ov.path)
-			return key === (parentPath ? parentPath : ov.path)
-		})
-		if (override) {
-			// Some or all members of the property are already overridden in curObj
-			if (objectPath.has(rawObj, override.path)) {
-				const rawValue = objectPath.get(rawObj, override.path)
-				if (override.op === 'delete' || (override.op === 'set' && _.isEqual(rawValue, override.value))) {
-					// Preserve all existing delete overrides and any set overrides where the value is not updated
-					result.overrides.push(override)
-				}
-			}
-		}
-
-		// check the values of the raw object against the current object, generating an override for each difference
-		const appliedCurObj = applyAndValidateOverrides(curObj).obj
-		for (const [curKey, curValue] of Object.entries(appliedCurObj)) {
-			if (key === curKey && !_.isEqual(value, curValue)) {
-				// Some or all members of the property have been modified
-				if (typeof value === 'object') {
-					// check one level down info the potentially modified object
-					for (const [rawKey, rawValue] of Object.entries(value)) {
-						if (!_.isEqual(rawValue, curValue[rawKey])) {
-							result.overrides.push(
-								literal<ObjectOverrideSetOp>({
-									op: 'set',
-									path: `${key}.${rawKey}`,
-									value: rawValue,
-								})
-							)
-						}
-					}
-				} else {
-					result.overrides.push(
-						literal<ObjectOverrideSetOp>({
-							op: 'set',
-							path: key,
-							value: value,
-						})
-					)
-				}
-			}
-		}
-		// }
-	}
-	return result
+	const overrides = getOverridesToPreserve(curObj, rawObj)
+
+	// apply preserved overrides on top of the defaults
+	const tmpObj: ReadonlyDeep<ObjectWithOverrides<T>> = { defaults: clone(curObj.defaults), overrides: overrides }
+	const flattenedObjWithPreservedOverrides = applyAndValidateOverrides(tmpObj).obj
+
+	// calculate overrides that are still missing
+	recursivelyGenerateOverrides(flattenedObjWithPreservedOverrides, rawObj, [], overrides)
+
+	return { defaults: clone(curObj.defaults), overrides: overrides }
+}
+
+function getOverridesToPreserve<T extends object>(
+	curObj: ReadonlyObjectDeep<ObjectWithOverrides<T>>,
+	rawObj: ReadonlyDeep<T>
+) {
+	const overrides: SomeObjectOverrideOp[] = []
+	curObj.overrides.forEach((override) => {
+		const rawValue = objectPath.get(rawObj, override.path)
+		if (
+			(override.op === 'delete' && rawValue === undefined) ||
+			(override.op === 'set' && _.isEqual(rawValue, override.value))
+		) {
+			// what was deleted remains deleted, and what was set remains equal
+			overrides.push(override)
+			return
+		}
+		const defaultValue = objectPath.get(curObj.defaults, override.path)
+		if (override.op === 'delete') {
+			if (_.isEqual(rawValue, defaultValue)) {
+				// previously deleted, brought back to defaults
+				return
+			}
+			// was deleted, but is brought to a non-default value
+			overrides.push({
+				op: 'set',
+				path: override.path,
+				value: rawValue,
+			})
+		}
+	})
+	return overrides
+}
+
+function recursivelyGenerateOverrides(
+	curObj: ReadonlyDeep<object>,
+	rawObj: ReadonlyDeep<object>,
+	path: string[],
+	outOverrides: SomeObjectOverrideOp[]
+) {
+	for (const [curKey, curValue] of Object.entries(curObj)) {
+		const rawValue = objectPath.get(rawObj, curKey)
+		const fullKeyPath = [...path, curKey]
+		const fullKeyPathString = fullKeyPath.join('.')
+		if (curValue !== undefined && rawValue === undefined) {
+			outOverrides.push({
+				op: 'delete',
+				path: fullKeyPathString,
+			})
+			continue
+		}
+		if (Array.isArray(rawValue) && !_.isEqual(curValue, rawValue)) {
+			// arrays are replaced wholesale; do not recurse into them
+			outOverrides.push({
+				op: 'set',
+				path: fullKeyPathString,
+				value: rawValue,
+			})
+			continue
+		}
+		if (typeof curValue === 'object' && curValue !== null && typeof rawValue === 'object' && rawValue !== null) {
+			recursivelyGenerateOverrides(curValue, rawValue, fullKeyPath, outOverrides)
+			continue
+		}
+		if (curValue !== rawValue) {
+			outOverrides.push({
+				op: 'set',
+				path: fullKeyPathString,
+				value: rawValue,
+			})
+		}
+	}
+	for (const [rawKey, rawValue] of Object.entries(rawObj)) {
+		const curValue = objectPath.get(curObj, rawKey)
+		if (curValue === undefined && rawValue !== undefined) {
+			outOverrides.push({
+				op: 'set',
+				path: [...path, rawKey].join('.'),
+				value: rawValue,
+			})
+		}
+	}
 }
 
 /**
diff --git a/packages/corelib/src/snapshots.ts b/packages/corelib/src/snapshots.ts
index 9791c5c719..031addbb6b 100644
--- a/packages/corelib/src/snapshots.ts
+++ b/packages/corelib/src/snapshots.ts
@@ -35,6 +35,6 @@ export interface CoreRundownPlaylistSnapshot {
 	adLibActions: Array<AdLibAction>
 	baselineAdLibActions: Array<RundownBaselineAdLibAction>
 	expectedPlayoutItems: Array<ExpectedPlayoutItem>
-	expectedPackages: Array<ExpectedPackageDB>
+	expectedPackages: Array<ExpectedPackageDB> // Note: when reading, this could be in the old format
 	timeline?: TimelineComplete
 }
diff --git a/packages/corelib/src/worker/ingest.ts b/packages/corelib/src/worker/ingest.ts
index 3e27a13bc1..ad2c081939 100644
--- a/packages/corelib/src/worker/ingest.ts
+++ b/packages/corelib/src/worker/ingest.ts
@@ -104,10 +104,6 @@ export enum IngestJobs {
 	 */
 	MosSwapStory = 'mosSwapStory',
 
-	/**
-	 * Debug: Regenerate ExpectedPackages for a Rundown
-	 */
-	ExpectedPackagesRegenerate = 'expectedPackagesRegenerate',
 	/**
 	 * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos
 	 */
@@ -229,9 +225,6 @@ export interface MosSwapStoryProps extends IngestPropsBase {
 	story1: MOS.IMOSString128
 }
 
-export interface ExpectedPackagesRegenerateProps {
-	rundownId: RundownId
-}
 export interface PackageInfosUpdatedRundownProps extends IngestPropsBase {
 	packageIds: ExpectedPackageId[]
 }
@@ -312,7 +305,6 @@ export type IngestJobFunc = {
 	[IngestJobs.MosMoveStory]: (data: MosMoveStoryProps) => void
 	[IngestJobs.MosSwapStory]: (data: MosSwapStoryProps) => void
 
-	[IngestJobs.ExpectedPackagesRegenerate]: (data: ExpectedPackagesRegenerateProps) => void
 	[IngestJobs.PackageInfosUpdatedRundown]: (data: PackageInfosUpdatedRundownProps) => void
 
 	[IngestJobs.UserRemoveRundown]: (data: UserRemoveRundownProps) => void
diff --git a/packages/corelib/src/worker/studio.ts b/packages/corelib/src/worker/studio.ts
index e5df4d1311..6eb045fc5e 100644
--- a/packages/corelib/src/worker/studio.ts
+++ b/packages/corelib/src/worker/studio.ts
@@ -205,6 +205,12 @@ export enum StudioJobs {
 	 * for use in ad.lib actions and other triggers
 	 */
 	SwitchRouteSet = 'switchRouteSet',
+
+	/**
+	 * Clean up any orphaned ExpectedPackage playout references.
+	 * During playout it is hard to track the removal of PieceInstances (particularly when resetting PieceInstances)
+	 */
+	CleanupOrphanedExpectedPackageReferences = 'cleanupOrphanedExpectedPackageReferences',
 }
 
 export interface RundownPlayoutPropsBase {
@@ -369,6 +375,11 @@ export interface SwitchRouteSetProps {
 	state: boolean | 'toggle'
 }
 
+export interface CleanupOrphanedExpectedPackageReferencesProps {
+	playlistId: RundownPlaylistId
+	rundownId: RundownId
+}
+
 /**
  * Set of valid functions, of form:
  * `id: (data) => return`
@@ -425,6 +436,8 @@ export type 
StudioJobFunc = {
 	[StudioJobs.ClearQuickLoopMarkers]: (data: ClearQuickLoopMarkersProps) => void
 	[StudioJobs.SwitchRouteSet]: (data: SwitchRouteSetProps) => void
+
+	[StudioJobs.CleanupOrphanedExpectedPackageReferences]: (data: CleanupOrphanedExpectedPackageReferencesProps) => void
 }
 
 export function getStudioQueueName(id: StudioId): string {
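The new job carries just these two ids. Based on the `queueStudioJob` helper visible in the worker's mock context later in this diff, queueing it from playout code would look roughly like this (a sketch; the `JobContext` import path and the surrounding function are assumptions):

```typescript
import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio'
import type { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
import type { JobContext } from '../jobs/index.js'

// Schedule the background cleanup once PieceInstances may have been removed.
// It is fire-and-forget: the job only prunes stale playout references.
async function scheduleOrphanedPackageCleanup(
	context: JobContext,
	playlistId: RundownPlaylistId,
	rundownId: RundownId
): Promise<void> {
	await context.queueStudioJob(StudioJobs.CleanupOrphanedExpectedPackageReferences, {
		playlistId,
		rundownId,
	})
}
```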
diff --git a/packages/corelib/tsconfig.build.json b/packages/corelib/tsconfig.build.json
index 6dfa10a429..44c5873a35 100755
--- a/packages/corelib/tsconfig.build.json
+++ b/packages/corelib/tsconfig.build.json
@@ -1,9 +1,10 @@
 {
 	"extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib",
 	"include": ["src/**/*.ts"],
-	"exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"],
+	"exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"],
 	"compilerOptions": {
 		"target": "es2019",
+		"rootDir": "./src",
 		"outDir": "./dist",
 		"baseUrl": "./",
 		"paths": {
@@ -12,6 +13,15 @@
 		},
 		"resolveJsonModule": true,
 		"types": ["node"],
-		"esModuleInterop": true
-	}
+		"esModuleInterop": true,
+		"composite": true
+	},
+	"references": [
+		{
+			"path": "../shared-lib"
+		},
+		{
+			"path": "../blueprints-integration"
+		}
+	]
 }
diff --git a/packages/documentation/docs/user-guide/features/prompter.md b/packages/documentation/docs/user-guide/features/prompter.md
index 6fc4f1f455..aba0e34ec0 100644
--- a/packages/documentation/docs/user-guide/features/prompter.md
+++ b/packages/documentation/docs/user-guide/features/prompter.md
@@ -46,6 +46,7 @@ The prompter can be controlled by different types of controllers. The control mo
 | `?mode=shuttlewebhid` | Controlled by a Contour Design ShuttleXpress, using the browser's WebHID API [See configuration details](prompter.md#control-using-contour-shuttlexpress-via-webhid) |
 | `?mode=pedal` | Controlled by any MIDI device outputting note values between 0 - 127 of CC notes on channel 8. Analogue Expression pedals work well with TRS-USB midi-converters. [See configuration details](prompter.md#control-using-midi-input-modepedal) |
 | `?mode=joycon` | Controlled by Nintendo Switch Joycon, using the HTML5 GamePad API. [See configuration details](prompter.md#control-using-nintendo-joycon-gamepad) |
+| `?mode=xbox` | Controlled by an Xbox controller, using the HTML5 GamePad API. [See configuration details](prompter.md#control-using-xbox-controller-modexbox) |
 
 #### Control using mouse \(scroll wheel\)
 
@@ -161,13 +162,14 @@ The Joycons can operate in 3 modes, the L-stick, the R-stick or both L+R sticks
 
 | Query parameter | Type | Description | Default |
 | :----------------------- | :--------------- | :------------------------------------------------------------------------------------------ | :--------------------------- |
-| `joycon_speedMap` | Array of numbes | Speeds to scroll by \(px. pr. frame - approx 60fps\) when scrolling forwards. The beginning of the forwards-range maps to the first number in this array, and thee end of the forwards-range map to the end of this array. All values in between are being interpolated in a spline curve. | `[1, 2, 3, 4, 5, 8, 12, 30]` |
+| `joycon_speedMap` | Array of numbers | Speeds to scroll by \(px. pr. frame - approx 60fps\) when scrolling forwards. The beginning of the forwards-range maps to the first number in this array, and the end of the forwards-range maps to the end of this array. All values in between are interpolated along a spline curve. | `[1, 2, 3, 4, 5, 8, 12, 30]` |
 | `joycon_reverseSpeedMap` | Array of numbers | Same as `joycon_speedMap` but for the backwards range. | `[1, 2, 3, 4, 5, 8, 12, 30]` |
 | `joycon_rangeRevMin` | number | The end of the backwards-range, full speed backwards. | `-1` |
 | `joycon_rangeNeutralMin` | number | The beginning of the backwards-range. | `-0.25` |
 | `joycon_rangeNeutralMax` | number | The minimum input to run forward, the start of the forward-range \(min speed\). This is also the end of any "deadband" you want filter out before starting moving forwards. | `0.25` |
 | `joycon_rangeFwdMax` | number | The maximum input, the end of the forward-range \(max speed\) | `1` |
 | `joycon_rightHandOffset` | number | A ratio to increase or decrease the R Joycon joystick sensitivity relative to the L Joycon. | `1.4` |
+| `joycon_invertJoystick` | 0 / 1 | Invert the joystick direction. When enabled, pushing the joystick forward scrolls up instead of down. | `1` |
 
 - `joycon_rangeNeutralMin` has to be greater than `joycon_rangeRevMin`
 - `joycon_rangeNeutralMax` has to be greater than `joycon_rangeNeutralMin`
@@ -197,3 +199,47 @@ You can turn on `?debug=1` to see how your input maps to an output.
 | _"I can't reach max speed backwards"_ | Increase `joycon_rangeRevMin` |
 | _"I can't reach max speed forwards"_ | Decrease `joycon_rangeFwdMax` |
 | _"As I find a good speed, it varies a bit in speed up/down even if I hold my finger still"_ | Use `?debug=1` to see what speed is calculated in the position the presenter wants to rest their finger in. Add more of that number in a sequence in the `joycon_speedMap` to flatten out the speed curve, i.e. `[1, 2, 3, 4, 4, 4, 4, 5, ...]` |
+
+#### Control using Xbox controller \(_?mode=xbox_\)
+
+This mode uses the browser's Gamepad API to control the prompter with an Xbox controller. It supports Xbox One, Xbox Series X|S, and compatible third-party controllers.
+
+The controller can be connected via Bluetooth or USB. **Note:** On macOS, Xbox controllers may not be recognized over USB due to driver limitations; Bluetooth is recommended.
+
+**Scroll control:**
+
+- **Right Trigger (RT):** Scroll forward - speed is proportional to trigger pressure
+- **Left Trigger (LT):** Scroll backward - speed is proportional to trigger pressure
+
+**Button map:**
+
+| **Button** | **Action** |
+| :---------------- | :------------------------- |
+| A | Take (go to next part) |
+| B | Go to the "On-air" story |
+| X | Go to the previous story |
+| Y | Go to the following story |
+| LB (Left Bumper) | Go to the top |
+| RB (Right Bumper) | Go to the "Next" story |
+| D-Pad Up | Scroll up (fine control) |
+| D-Pad Down | Scroll down (fine control) |
+
+**Configuration parameters:**
+
+| Query parameter | Type | Description | Default |
+| :--------------------- | :--------------- | :------------------------------------------------------------------------------------------------------------------------- | :---------------------------- |
+| `xbox_speedMap` | Array of numbers | Speeds to scroll by (px per frame, ~60fps) when scrolling forwards. Values are interpolated using a spline curve based on trigger pressure. | `[2, 3, 5, 6, 8, 12, 18, 45]` |
+| `xbox_reverseSpeedMap` | Array of numbers | Same as `xbox_speedMap` but for the backwards range (left trigger). | `[2, 3, 5, 6, 8, 12, 18, 45]` |
+| `xbox_triggerDeadZone` | number | Dead zone for the triggers, to prevent accidental scrolling. Value between 0 and 1. | `0.1` |
+
+You can turn on `?debug=1` to see how your trigger input maps to scroll speed.
+
+**Calibration guide:**
+
+| **Symptom** | **Adjustment** |
+| :----------------------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------- |
+| _"It starts scrolling when I'm not touching the trigger"_ | Increase `xbox_triggerDeadZone` (e.g., `0.15` or `0.2`) |
+| _"I have to press too hard before it starts moving"_ | Decrease `xbox_triggerDeadZone` (e.g., `0.05`) |
+| _"It scrolls too fast"_ | Use smaller values in `xbox_speedMap`, e.g., `[1, 2, 3, 4, 5, 8, 12, 30]` |
+| _"It scrolls too slow"_ | Use larger values in `xbox_speedMap`, e.g., `[3, 6, 10, 15, 25, 40, 60, 100]` |
+| _"Speed jumps too quickly from slow to fast"_ | Add more intermediate values to `xbox_speedMap` to create a smoother curve, e.g., `[1, 2, 3, 4, 5, 6, 8, 10, 15, 20, 30]` |
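As a concrete illustration, a tuned Xbox setup might combine these parameters in a single prompter URL (the hostname and studio id are invented, and the comma-separated array encoding is an assumption, mirroring how the other prompter array parameters are commonly passed):

```
http://sofie.example.com/prompter/studio0?mode=xbox&xbox_triggerDeadZone=0.15&xbox_speedMap=1,2,3,4,5,8,12,30
```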
diff --git a/packages/job-worker/package.json b/packages/job-worker/package.json
index 21ed27ecfc..db14f7971d 100644
--- a/packages/job-worker/package.json
+++ b/packages/job-worker/package.json
@@ -15,10 +15,6 @@
 	},
 	"homepage": "https://github.com/Sofie-Automation/sofie-core/blob/main/packages/job-worker#readme",
 	"scripts": {
-		"dev": "run -T nodemon --config nodemon.json src/index.ts",
-		"dev:debug": "run -T nodemon --config nodemon.json --inspect-brk src/index.ts",
-		"build": "run -T rimraf dist && run build:main",
-		"build:main": "run -T tsc -p tsconfig.build.json",
 		"lint:raw": "run -T eslint",
 		"lint": "run lint:raw .",
 		"unit": "run -T jest",
diff --git a/packages/job-worker/src/__mocks__/collection.ts b/packages/job-worker/src/__mocks__/collection.ts
index 0c63527d77..065f9ee098 100644
--- a/packages/job-worker/src/__mocks__/collection.ts
+++ b/packages/job-worker/src/__mocks__/collection.ts
@@ -190,6 +190,16 @@ export class MockMongoCollection<TDoc extends { _id: ProtectedString<any> }> imp
 		return docs.length
 	}
 
+	private async removeOne(selector: MongoQuery<TDoc> | TDoc['_id']): Promise<number> {
+		this.#ops.push({ type: 'removeOne', args: [selector] })
+
+		const docs: Pick<TDoc, '_id'>[] = await this.findFetchInner(selector, { projection: { _id: 1 }, limit: 1 })
+		for (const doc of docs) {
+			this.#documents.delete(doc._id)
+		}
+
+		return docs.length
+	}
 	async update(selector: MongoQuery<TDoc> | TDoc['_id'], modifier: MongoModifier<TDoc>): Promise<number> {
 		return this.updateInner(selector, modifier, false)
 	}
@@ -231,8 +241,12 @@ export class MockMongoCollection<TDoc extends { _id: ProtectedString<any> }> imp
 			await this.updateInner(op.updateOne.filter, op.updateOne.update, true)
 		} else if ('replaceOne' in op) {
 			await this.replace(op.replaceOne.replacement as any)
+		} else if ('insertOne' in op) {
+			await this.insertOne(op.insertOne.document as any)
 		} else if ('deleteMany' in op) {
 			await this.remove(op.deleteMany.filter)
+		} else if ('deleteOne' in op) {
+			await this.removeOne(op.deleteOne.filter)
 		} else {
 			// Note: implement more as we start using them
 			throw new Error(`Unknown mongo Bulk Operation: ${JSON.stringify(op)}`)
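With `insertOne` and `deleteOne` added, the mock can now replay the bulk operations the reworked ExpectedPackages code emits. A sketch of a call it understands, assuming the dispatch shown above backs the collection's `bulkWrite` method (document shape invented):

```typescript
// Operation shapes follow the mongodb driver's AnyBulkWriteOperation
await collection.bulkWrite([
	{ insertOne: { document: { _id: protectString('doc0'), name: 'a' } as any } },
	{ updateOne: { filter: { _id: protectString('doc0') }, update: { $set: { name: 'b' } } } },
	{ deleteOne: { filter: { _id: protectString('doc0') } } }, // removes at most one document
	{ deleteMany: { filter: { name: 'b' } } },
])
```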
diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts
index d11c3c5431..85ba9eaa9e 100644
--- a/packages/job-worker/src/__mocks__/context.ts
+++ b/packages/job-worker/src/__mocks__/context.ts
@@ -29,7 +29,7 @@ import { clone } from '@sofie-automation/corelib/dist/lib'
 import { protectString } from '@sofie-automation/corelib/dist/protectedString'
 import { EventsJobFunc } from '@sofie-automation/corelib/dist/worker/events'
 import { IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest'
-import { StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio'
+import { StudioJobFunc, StudioJobs } from '@sofie-automation/corelib/dist/worker/studio'
 import { ReadonlyDeep } from 'type-fest'
 import { WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../blueprints/cache.js'
 import {
@@ -46,6 +46,7 @@ import {
 	ProcessedShowStyleBase,
 	ProcessedShowStyleCompound,
 	ProcessedShowStyleVariant,
+	QueueJobOptions,
 } from '../jobs/index.js'
 import { PlaylistLock, RundownLock } from '../jobs/lock.js'
 import { BaseModel } from '../modelBase.js'
@@ -153,9 +154,14 @@ export class MockJobContext implements JobContext {
 		throw new Error('Method not implemented.')
 	}
 	async queueStudioJob<T extends keyof StudioJobFunc>(
-		_name: T,
-		_data: Parameters<StudioJobFunc[T]>[0]
+		name: T,
+		_data: Parameters<StudioJobFunc[T]>[0],
+		_options?: QueueJobOptions
 	): Promise<void> {
+		// Silently ignore the cleanup job - it's a background task that doesn't need to run in tests
+		if (name === StudioJobs.CleanupOrphanedExpectedPackageReferences) {
+			return
+		}
 		throw new Error('Method not implemented.')
 	}
 	async queueEventJob(
diff --git a/packages/job-worker/src/blueprints/__tests__/context-OnSetAsNextContext.test.ts b/packages/job-worker/src/blueprints/__tests__/context-OnSetAsNextContext.test.ts
index 475669e7a0..a81204094f 100644
--- a/packages/job-worker/src/blueprints/__tests__/context-OnSetAsNextContext.test.ts
+++ b/packages/job-worker/src/blueprints/__tests__/context-OnSetAsNextContext.test.ts
@@ -174,7 +174,25 @@ describe('Test blueprint api context', () => {
 		await context.updatePartInstance('next', { title: 'My Part' } as Partial<IBlueprintMutatablePart>)
 
 		expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1)
-		expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' })
+		expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' }, {})
+	})
+
+	test('updatePartInstance with instanceProps', async () => {
+		const { context, mockActionService } = await getTestee()
+
+		await context.updatePartInstance(
+			'next',
+			{ title: 'My Part' } as Partial<IBlueprintMutatablePart>,
+			{ invalidReason: { key: 'test' } }
+		)
+		expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1)
+		expect(mockActionService.updatePartInstance).toHaveBeenCalledWith(
+			'next',
+			{ title: 'My Part' },
+			{
+				invalidReason: { key: 'test' },
+			}
+		)
 	})
 
 	test('manuallySelected when false', async () => {
diff --git a/packages/job-worker/src/blueprints/__tests__/context-OnTakeContext.test.ts b/packages/job-worker/src/blueprints/__tests__/context-OnTakeContext.test.ts
index bd71908fb4..705b58ad49 100644
--- a/packages/job-worker/src/blueprints/__tests__/context-OnTakeContext.test.ts
+++ b/packages/job-worker/src/blueprints/__tests__/context-OnTakeContext.test.ts
@@ -189,7 +189,25 @@ describe('Test blueprint api context', () => {
 		await context.updatePartInstance('next', { title: 'My Part' } as Partial<IBlueprintMutatablePart>)
 
 		expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1)
-		expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' })
+		expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' }, {})
+	})
+
+	test('updatePartInstance with instanceProps', async () => {
+		const { context, mockActionService } = await getTestee()
+
+		await context.updatePartInstance(
+			'next',
+			{ title: 'My Part' } as Partial<IBlueprintMutatablePart>,
+			{ invalidReason: { key: 
'test' } } + ) + expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1) + expect(mockActionService.updatePartInstance).toHaveBeenCalledWith( + 'next', + { title: 'My Part' }, + { + invalidReason: { key: 'test' }, + } + ) }) }) }) diff --git a/packages/job-worker/src/blueprints/__tests__/context-adlibActions.test.ts b/packages/job-worker/src/blueprints/__tests__/context-adlibActions.test.ts index cf940dc356..5a6d49faba 100644 --- a/packages/job-worker/src/blueprints/__tests__/context-adlibActions.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/context-adlibActions.test.ts @@ -158,7 +158,25 @@ describe('Test blueprint api context', () => { await context.updatePartInstance('next', { title: 'My Part' } as Partial>) expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1) - expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' }) + expect(mockActionService.updatePartInstance).toHaveBeenCalledWith('next', { title: 'My Part' }, {}) + }) + + test('updatePartInstance with instanceProps', async () => { + const { context, mockActionService } = await getTestee() + + await context.updatePartInstance( + 'next', + { title: 'My Part' } as Partial>, + { invalidReason: { key: 'test' } } + ) + expect(mockActionService.updatePartInstance).toHaveBeenCalledTimes(1) + expect(mockActionService.updatePartInstance).toHaveBeenCalledWith( + 'next', + { title: 'My Part' }, + { + invalidReason: { key: 'test' }, + } + ) }) }) }) diff --git a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts index a476c1c593..0ca4f54dad 100644 --- a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts +++ b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts @@ -3,6 +3,7 @@ import { ContextInfo } from './CommonContext.js' import { ShowStyleUserContext } from './ShowStyleUserContext.js' import { IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -124,9 +125,10 @@ export class OnSetAsNextContext async updatePartInstance( part: 'current' | 'next', - props: Partial> + props: Partial>, + instanceProps: Partial = {} ): Promise> { - return this.partAndPieceInstanceService.updatePartInstance(part, props) + return this.partAndPieceInstanceService.updatePartInstance(part, props, instanceProps) } async removePieceInstances(part: 'current' | 'next', pieceInstanceIds: string[]): Promise { diff --git a/packages/job-worker/src/blueprints/context/OnTakeContext.ts b/packages/job-worker/src/blueprints/context/OnTakeContext.ts index 9d431d9958..3438040a22 100644 --- a/packages/job-worker/src/blueprints/context/OnTakeContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTakeContext.ts @@ -1,5 +1,6 @@ import { IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -116,9 +117,10 @@ export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContex async updatePartInstance( part: 'current' | 'next', - props: Partial + props: Partial, + instanceProps: Partial = {} ): Promise { - return this.partAndPieceInstanceService.updatePartInstance(part, props) + return this.partAndPieceInstanceService.updatePartInstance(part, props, instanceProps) } async stopPiecesOnLayers(sourceLayerIds: string[], timeOffset?: number): Promise { diff --git a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts 
b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts index d8289be7d9..fdd7e89a56 100644 --- a/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts +++ b/packages/job-worker/src/blueprints/context/SyncIngestUpdateToPartInstanceContext.ts @@ -13,6 +13,7 @@ import { IBlueprintPieceInstance, OmitId, IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPartInstance, SomeContent, WithTimeline, @@ -23,6 +24,7 @@ import { convertPieceInstanceToBlueprints, convertPartInstanceToBlueprints, convertPartialBlueprintMutablePartToCore, + convertPartialBlueprintMutatablePartInstanceToCore, } from './lib.js' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { JobContext, JobStudio, ProcessedShowStyleCompound } from '../../jobs/index.js' @@ -166,7 +168,10 @@ export class SyncIngestUpdateToPartInstanceContext return convertPieceInstanceToBlueprints(pieceInstance.pieceInstance) } - updatePartInstance(updatePart: Partial): IBlueprintPartInstance { + updatePartInstance( + updatePart: Partial, + instanceProps: Partial = {} + ): IBlueprintPartInstance { if (!this.partInstance) throw new Error(`PartInstance has been removed`) // for autoNext, the new expectedDuration cannot be shorter than the time a part has been on-air for @@ -184,8 +189,20 @@ export class SyncIngestUpdateToPartInstanceContext updatePart, this.showStyleCompound.blueprintId ) + const playoutUpdatePartInstance = convertPartialBlueprintMutatablePartInstanceToCore( + instanceProps, + this.showStyleCompound.blueprintId + ) + + const partPropsUpdated = this.partInstance.updatePartProps(playoutUpdatePart) + let instancePropsUpdated = false + + if (playoutUpdatePartInstance) { + this.partInstance.setInvalidReason(playoutUpdatePartInstance.invalidReason) + instancePropsUpdated = true + } - if (!this.partInstance.updatePartProps(playoutUpdatePart)) { + if (!partPropsUpdated && !instancePropsUpdated) { throw new Error(`Cannot update PartInstance. 
Some valid properties must be defined`) } diff --git a/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts b/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts new file mode 100644 index 0000000000..a6543c76d9 --- /dev/null +++ b/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts @@ -0,0 +1,655 @@ +import { setupDefaultJobEnvironment } from '../../../__mocks__/context.js' +import { WatchedPackagesHelper } from '../watchedPackages.js' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' +import { literal } from '@sofie-automation/corelib/dist/lib' +import { PackageInfo } from '@sofie-automation/blueprints-integration' +import { + ExpectedPackageId, + RundownId, + BucketId, + PeripheralDeviceId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' + +describe('WatchedPackagesHelper', () => { + const mockDeviceId = protectString('device1') + describe('empty', () => { + it('creates an empty helper', () => { + const context = setupDefaultJobEnvironment() + const helper = WatchedPackagesHelper.empty(context) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(false) + expect(helper.getPackageInfo('pkg1')).toEqual([]) + }) + }) + + describe('create', () => { + it('creates helper with no matching packages', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(false) + }) + + it('creates helper with packages from rundown', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + // Add expected package to the database + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Add package info + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + expect(helper.getPackageInfo('package1')).toHaveLength(1) + expect(helper.getPackageInfo('package1')[0].type).toBe(PackageInfo.Type.SCAN) + }) + + it('creates helper with packages from bucket', async () => { + const context = setupDefaultJobEnvironment() + const bucketId = protectString('bucket1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: 
packageId, + studioId: context.studioId, + rundownId: null, + bucketId: bucketId, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, null, bucketId, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + }) + + it('filters packages by ingest source', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + // Package with matching source + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Package with non-matching source + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece2') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(true) + expect(helper.hasPackage(protectString('pkg2'))).toBe(false) + }) + + it('splits packages with multiple ingest sources', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece2') } as any, + ] as any, + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Should match both sources + const helper1 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + expect(helper1.hasPackage(packageId)).toBe(true) + + const helper2 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + }) + expect(helper2.hasPackage(packageId)).toBe(true) + }) + + it('does return package info for packages with listenToPackageInfoUpdates: false', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + 
pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: false, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + // Package should still be found (create doesn't filter by listenToPackageInfoUpdates) + expect(helper.hasPackage(packageId)).toBe(true) + // And package info should be available + expect(helper.getPackageInfo('package1')).toHaveLength(1) + }) + + it('handles packages with mixed listenToPackageInfoUpdates in sources', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: false, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Helper with source that listens to updates should include the package + const helper1 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + expect(helper1.hasPackage(packageId)).toBe(true) + + // Helper with source that doesn't listen to updates should also include it + const helper2 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + }) + expect(helper2.hasPackage(packageId)).toBe(true) + }) + }) + + describe('filter', () => { + it('filters packages based on predicate', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + // Add multiple packages + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package2', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + 
created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + // Filter to only keep pkg1 + const filtered = helper.filter(context, (pkg) => pkg.packageId === protectString('pkg1')) + + expect(filtered.hasPackage(protectString('pkg1'))).toBe(true) + expect(filtered.hasPackage(protectString('pkg2'))).toBe(false) + }) + + it('filters package infos along with packages', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package2', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: protectString('pkg1'), + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info2'), + studioId: context.studioId, + packageId: protectString('pkg2'), + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'def456', + actualContentVersionHash: 'def456', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const filtered = helper.filter(context, (pkg) => pkg.packageId === protectString('pkg1')) + + // Should only have info for pkg1 + expect(filtered.getPackageInfo('package1')).toHaveLength(1) + expect(filtered.getPackageInfo('package2')).toHaveLength(0) + }) + }) + + describe('hasPackage', () => { + it('returns true for existing package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + }) + + it('returns false for 
non-existing package', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('nonexistent'))).toBe(false) + }) + }) + + describe('getPackageInfo', () => { + it('returns empty array for unknown package', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.getPackageInfo('unknown')).toEqual([]) + }) + + it('returns package info for known package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const infos = helper.getPackageInfo('package1') + expect(infos).toHaveLength(1) + expect(infos[0].type).toBe(PackageInfo.Type.SCAN) + }) + + it('returns multiple package infos for a package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info2'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.DEEPSCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const infos = 
helper.getPackageInfo('package1') + expect(infos).toHaveLength(2) + expect(infos.map((i) => i.type)).toContain(PackageInfo.Type.SCAN) + expect(infos.map((i) => i.type)).toContain(PackageInfo.Type.DEEPSCAN) + }) + + it('returns empty array for package with no info', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.getPackageInfo('package1')).toEqual([]) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index 3eaaf728b6..a69b306acd 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -2,6 +2,7 @@ import { IActionExecutionContext, IDataStoreActionExecutionContext, IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -181,9 +182,10 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct async updatePartInstance( part: 'current' | 'next', - props: Partial + props: Partial, + instanceProps: Partial = {} ): Promise { - return this.partAndPieceInstanceService.updatePartInstance(part, props) + return this.partAndPieceInstanceService.updatePartInstance(part, props, instanceProps) } async stopPiecesOnLayers(sourceLayerIds: string[], timeOffset?: number): Promise { diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index b9cad9d2af..9a36ae50c3 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -1,6 +1,6 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' -import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { DBPart, PartInvalidReason } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { deserializePieceTimelineObjectsBlob, @@ -35,6 +35,7 @@ import { IBlueprintAdLibPieceDB, IBlueprintConfig, IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPartDB, IBlueprintPartInstance, IBlueprintPiece, @@ -51,6 +52,7 @@ import { IBlueprintShowStyleVariant, IOutputLayer, ISourceLayer, + NoteSeverity, PieceAbSessionInfo, RundownPlaylistTiming, } from '@sofie-automation/blueprints-integration' @@ -209,6 +211,7 @@ export function convertPartInstanceToBlueprints(partInstance: ReadonlyDeep privateData: clone(action.privateData), publicData: clone(action.publicData), partId: unprotectString(action.partId), + invalid: action.invalid, allVariants: action.allVariants, userDataManifest: clone(action.userDataManifest), display: clone(action.display), // TODO - type mismatch @@ -553,27 +557,28 @@ 
function translateUserEditsToBlueprint( userEdits.map((userEdit) => { switch (userEdit.type) { case UserEditingType.ACTION: - return { + return literal({ type: UserEditingType.ACTION, id: userEdit.id, label: omit(userEdit.label, 'namespaces'), icon: userEdit.icon, iconInactive: userEdit.iconInactive, isActive: userEdit.isActive, - } satisfies Complete + }) case UserEditingType.FORM: - return { + return literal({ type: UserEditingType.FORM, id: userEdit.id, label: omit(userEdit.label, 'namespaces'), schema: clone(userEdit.schema), currentValues: clone(userEdit.currentValues), - } satisfies Complete + }) case UserEditingType.SOFIE: - return { + return literal({ type: UserEditingType.SOFIE, id: userEdit.id, - } satisfies Complete + limitToCurrentPart: userEdit.limitToCurrentPart, + }) default: assertNever(userEdit) return undefined @@ -615,28 +620,29 @@ export function translateUserEditsFromBlueprint( userEdits.map((userEdit) => { switch (userEdit.type) { case UserEditingType.ACTION: - return { + return literal({ type: UserEditingType.ACTION, id: userEdit.id, label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), icon: userEdit.icon, iconInactive: userEdit.iconInactive, isActive: userEdit.isActive, - } satisfies Complete + }) case UserEditingType.FORM: - return { + return literal({ type: UserEditingType.FORM, id: userEdit.id, label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), schema: clone(userEdit.schema), currentValues: clone(userEdit.currentValues), translationNamespaces: unprotectStringArray(blueprintIds), - } satisfies Complete + }) case UserEditingType.SOFIE: - return { + return literal({ type: UserEditingType.SOFIE, id: userEdit.id, - } satisfies Complete + limitToCurrentPart: userEdit.limitToCurrentPart, + }) default: assertNever(userEdit) return undefined @@ -701,6 +707,39 @@ export function convertPartialBlueprintMutablePartToCore( return playoutUpdatePart } + +export interface PlayoutMutatablePartInstance extends Omit { + invalidReason?: PartInvalidReason +} + +/** + * Converts a partial IBlueprintMutatablePartInstance and wraps translatable messages with blueprint namespace + */ +export function convertPartialBlueprintMutatablePartInstanceToCore( + instanceProps: Partial, + blueprintId: BlueprintId +): Partial { + const result: Partial = { + ...instanceProps, + invalidReason: undefined, + } + + if (instanceProps.invalidReason) { + result.invalidReason = { + message: wrapTranslatableMessageFromBlueprints(instanceProps.invalidReason, [blueprintId]), + severity: NoteSeverity.ERROR, + } + } else if ('invalidReason' in instanceProps) { + // Explicitly clearing invalidReason + result.invalidReason = undefined + } else { + // Not touching invalidReason at all + delete result.invalidReason + } + + return result +} + export function createBlueprintQuickLoopInfo(playlist: ReadonlyDeep): BlueprintQuickLookInfo | null { const playlistLoopProps = playlist.quickLoop if (!playlistLoopProps) return null diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index f18e2e3a0c..28b1b80641 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -3,6 +3,7 @@ import { PlayoutModel } from '../../../playout/model/PlayoutModel.js' import { PlayoutPartInstanceModel } from 
'../../../playout/model/PlayoutPartInstanceModel.js' import { IBlueprintMutatablePart, + IBlueprintMutatablePartInstance, IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece, @@ -20,6 +21,7 @@ import { convertPartInstanceToBlueprints, convertPartToBlueprints, convertPartialBlueprintMutablePartToCore, + convertPartialBlueprintMutatablePartInstanceToCore, convertPieceInstanceToBlueprints, convertPieceToBlueprints, convertResolvedPieceInstanceToBlueprints, @@ -355,7 +357,8 @@ export class PartAndPieceInstanceActionService { async updatePartInstance( part: 'current' | 'next', - props: Partial + props: Partial, + instanceProps: Partial ): Promise { const partInstance = this.#getPartInstance(part) if (!partInstance) { @@ -363,8 +366,23 @@ export class PartAndPieceInstanceActionService { } const playoutUpdatePart = convertPartialBlueprintMutablePartToCore(props, this.showStyleCompound.blueprintId) + const playoutUpdatePartInstance = convertPartialBlueprintMutatablePartInstanceToCore( + instanceProps, + this.showStyleCompound.blueprintId + ) + + const partPropsUpdated = partInstance.updatePartProps(playoutUpdatePart) + let instancePropsUpdated = false + + if (playoutUpdatePartInstance && 'invalidReason' in playoutUpdatePartInstance) { + if (part !== 'next') { + throw new Error(`Can only set invalidReason on the next PartInstance`) + } + partInstance.setInvalidReason(playoutUpdatePartInstance.invalidReason) + instancePropsUpdated = true + } - if (!partInstance.updatePartProps(playoutUpdatePart)) { + if (!partPropsUpdated && !instancePropsUpdated) { throw new Error('Some valid properties must be defined') } diff --git a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts index c616e73f51..a3e6297658 100644 --- a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts +++ b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts @@ -4,6 +4,7 @@ import { IBlueprintPart, IBlueprintPiece, IBlueprintPieceType, + NoteSeverity, PieceLifespan, } from '@sofie-automation/blueprints-integration' import { PlayoutModel } from '../../../../playout/model/PlayoutModel.js' @@ -1477,6 +1478,7 @@ describe('Test blueprint api context', () => { expect(resultPiece).toEqual(convertPieceInstanceToBlueprints(pieceInstance1.pieceInstance)) const pieceInstance0After = { ...pieceInstance0Before, + neededExpectedPackageIds: [], piece: { ...pieceInstance0Before.piece, ...omit(pieceInstance0Delta, 'badProperty', '_id'), @@ -1763,7 +1765,7 @@ describe('Test blueprint api context', () => { await wrapWithPlayoutModel(jobContext, playlistId, async (playoutModel) => { const { service } = await getTestee(jobContext, playoutModel) - await expect(service.updatePartInstance('current', { title: 'new' })).rejects.toThrow( + await expect(service.updatePartInstance('current', { title: 'new' }, {})).rejects.toThrow( 'PartInstance could not be found' ) }) @@ -1772,17 +1774,17 @@ describe('Test blueprint api context', () => { await setPartInstances(jobContext, playlistId, partInstance, undefined) await wrapWithPlayoutModel(jobContext, playlistId, async (playoutModel) => { const { service } = await getTestee(jobContext, playoutModel) - await expect(service.updatePartInstance('current', {})).rejects.toThrow( + await expect(service.updatePartInstance('current', {}, {})).rejects.toThrow( 'Some 
valid properties must be defined' ) await expect( - service.updatePartInstance('current', { _id: 'bad', nope: 'ok' } as any) + service.updatePartInstance('current', { _id: 'bad', nope: 'ok' } as any, {}) ).rejects.toThrow('Some valid properties must be defined') - await expect(service.updatePartInstance('next', { title: 'new' })).rejects.toThrow( + await expect(service.updatePartInstance('next', { title: 'new' }, {})).rejects.toThrow( 'PartInstance could not be found' ) - await service.updatePartInstance('current', { title: 'new' }) + await service.updatePartInstance('current', { title: 'new' }, {}) }) }) test('good', async () => { @@ -1810,7 +1812,7 @@ describe('Test blueprint api context', () => { classes: ['123'], badProperty: 9, // This will be dropped } - const resultPart = await service.updatePartInstance('next', partInstance0Delta) + const resultPart = await service.updatePartInstance('next', partInstance0Delta, {}) const partInstance1 = playoutModel.nextPartInstance! as PlayoutPartInstanceModelImpl expect(partInstance1).toBeTruthy() @@ -1831,6 +1833,54 @@ describe('Test blueprint api context', () => { expect(service.currentPartState).toEqual(ActionPartChange.NONE) }) }) + test('invalidReason on current - throws error', async () => { + const { jobContext, playlistId, rundownId } = await setupMyDefaultRundown() + + const partInstance = (await jobContext.mockCollections.PartInstances.findOne({ + rundownId, + })) as DBPartInstance + expect(partInstance).toBeTruthy() + + // Set a current part instance + await setPartInstances(jobContext, playlistId, partInstance, undefined) + await wrapWithPlayoutModel(jobContext, playlistId, async (playoutModel) => { + const { service } = await getTestee(jobContext, playoutModel) + + await expect( + service.updatePartInstance('current', {}, { invalidReason: { key: 'test' } }) + ).rejects.toThrow('Can only set invalidReason on the next PartInstance') + }) + }) + test('invalidReason on next - sets and clears', async () => { + const { jobContext, playlistId, rundownId } = await setupMyDefaultRundown() + + const partInstance = (await jobContext.mockCollections.PartInstances.findOne({ + rundownId, + })) as DBPartInstance + expect(partInstance).toBeTruthy() + + // Set as next part instance + await setPartInstances(jobContext, playlistId, undefined, partInstance) + await wrapWithPlayoutModel(jobContext, playlistId, async (playoutModel) => { + const { service } = await getTestee(jobContext, playoutModel) + + // Set invalidReason + const invalidReason = { key: 'test_error', args: { foo: 'bar' } } + await service.updatePartInstance('next', {}, { invalidReason }) + const partInstance1 = playoutModel.nextPartInstance! 
+			expect(partInstance1.partInstance.invalidReason).toEqual({
+				message: {
+					...invalidReason,
+					namespaces: [expect.any(String)],
+				},
+				severity: NoteSeverity.ERROR,
+			})
+
+			// Clear invalidReason
+			await service.updatePartInstance('next', {}, { invalidReason: undefined })
+			expect(partInstance1.partInstance.invalidReason).toBeUndefined()
+		})
+	})
 	})
 })
 })
diff --git a/packages/job-worker/src/blueprints/context/watchedPackages.ts b/packages/job-worker/src/blueprints/context/watchedPackages.ts
index 29d1e8901f..7c690f45c0 100644
--- a/packages/job-worker/src/blueprints/context/watchedPackages.ts
+++ b/packages/job-worker/src/blueprints/context/watchedPackages.ts
@@ -1,29 +1,31 @@
-import {
-	ExpectedPackageDB,
-	ExpectedPackageDBBase,
-	ExpectedPackageFromRundown,
-} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos'
 import { JobContext } from '../../jobs/index.js'
-import { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { BucketId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { Filter as FilterQuery } from 'mongodb'
 import { PackageInfo } from '@sofie-automation/blueprints-integration'
 import { unprotectObjectArray } from '@sofie-automation/corelib/dist/protectedString'
-import { ExpectedPackageForIngestModel, IngestModelReadonly } from '../../ingest/model/IngestModel.js'
+import { IngestModelReadonly } from '../../ingest/model/IngestModel.js'
 import { ReadonlyDeep } from 'type-fest'
+import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js'
+import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

 /**
  * This is a helper class to simplify exposing packageInfo to various places in the blueprints
  */
 export class WatchedPackagesHelper {
-	private readonly packages = new Map<ExpectedPackageId, ReadonlyDeep<ExpectedPackageDB>>()
+	private readonly packages = new Map<
+		ExpectedPackageId,
+		ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>[]
+	>()

 	private constructor(
-		packages: ReadonlyDeep<ExpectedPackageDB[]>,
+		packages: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>[]>,
 		private readonly packageInfos: ReadonlyDeep<PackageInfoDB[]>
 	) {
 		for (const pkg of packages) {
-			this.packages.set(pkg._id, pkg)
+			const arr = this.packages.get(pkg.packageId) || []
+			arr.push(pkg)
+			this.packages.set(pkg.packageId, arr)
 		}
 	}

@@ -39,21 +41,41 @@ export class WatchedPackagesHelper {
 	 * @param studioId The studio this is for
 	 * @param filter A mongo query to specify the packages that should be included
 	 */
-	static async create<TPackage extends ExpectedPackageDBBase>(
+	static async create(
 		context: JobContext,
-		filter: FilterQuery<Omit<TPackage, 'studioId'>>
+		rundownId: RundownId | null,
+		bucketId: BucketId | null,
+		filterIngestSources: FilterQuery<ExpectedPackageIngestSource>
 	): Promise<WatchedPackagesHelper> {
 		// Load all the packages and the infos that are watched
 		const watchedPackages = await context.directCollections.ExpectedPackages.findFetch({
-			...filter,
 			studioId: context.studioId,
-		} as any) // TODO: don't use any here
+			rundownId: rundownId,
+			bucketId: bucketId,
+			ingestSources: {
+				$elemMatch: filterIngestSources,
+			},
+		})
 		const watchedPackageInfos = await context.directCollections.PackageInfos.findFetch({
 			studioId: context.studioId,
 			packageId: { $in: watchedPackages.map((p) => p._id) },
 		})

-		return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos)
+		const watchedIngestPackages: IngestExpectedPackage[] = watchedPackages.flatMap(
+			(expectedPackage) => {
+				// Split into a package per source
+				return expectedPackage.ingestSources.map(
+					(source) =>
+						({
+							packageId: expectedPackage._id,
+							package: expectedPackage.package,
+							source: source,
+						}) satisfies IngestExpectedPackage
+				)
+			}
+		)
+
+		return new WatchedPackagesHelper(watchedIngestPackages, watchedPackageInfos)
 	}

 	/**
@@ -65,7 +87,7 @@ export class WatchedPackagesHelper {
 		context: JobContext,
 		ingestModel: IngestModelReadonly
 	): Promise<WatchedPackagesHelper> {
-		const packages: ReadonlyDeep<ExpectedPackageForIngestModel>[] = []
+		const packages: ReadonlyDeep<IngestExpectedPackage>[] = []

 		packages.push(...ingestModel.expectedPackagesForRundownBaseline)

@@ -77,7 +99,7 @@ export class WatchedPackagesHelper {

 		return this.#createFromPackages(
 			context,
-			packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates)
+			packages.filter((pkg) => !!pkg.source.listenToPackageInfoUpdates)
 		)
 	}

@@ -92,7 +114,7 @@ export class WatchedPackagesHelper {
 		ingestModel: IngestModelReadonly,
 		segmentExternalIds: string[]
 	): Promise<WatchedPackagesHelper> {
-		const packages: ReadonlyDeep<ExpectedPackageForIngestModel>[] = []
+		const packages: ReadonlyDeep<IngestExpectedPackage>[] = []

 		for (const externalId of segmentExternalIds) {
 			const segment = ingestModel.getSegmentByExternalId(externalId)
@@ -105,17 +127,17 @@ export class WatchedPackagesHelper {

 		return this.#createFromPackages(
 			context,
-			packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates)
+			packages.filter((pkg) => !!pkg.source.listenToPackageInfoUpdates)
 		)
 	}

-	static async #createFromPackages(context: JobContext, packages: ReadonlyDeep<ExpectedPackageForIngestModel>[]) {
+	static async #createFromPackages(context: JobContext, packages: ReadonlyDeep<IngestExpectedPackage>[]) {
 		// Load all the packages and the infos that are watched
 		const watchedPackageInfos =
 			packages.length > 0
 				? await context.directCollections.PackageInfos.findFetch({
 						studioId: context.studio._id,
-						packageId: { $in: packages.map((p) => p._id) },
+						packageId: { $in: packages.map((p) => p.packageId) },
 					})
 				: []

@@ -124,30 +146,41 @@ export class WatchedPackagesHelper {

 	/**
 	 * Create a new helper with a subset of the data in the current helper.
-	 * This is useful so that all the data for a rundown can be loaded at the start of an ingest operation, and then subsets can be taken for particular blueprint methods without needing to do more db operations.
+	 * This is useful so that all the data for a rundown can be loaded at the start of an ingest operation,
+	 * and then subsets can be taken for particular blueprint methods without needing to do more db operations.
 	 * @param func A filter to check if each package should be included
 	 */
-	filter(_context: JobContext, func: (pkg: ReadonlyDeep<ExpectedPackageDB>) => boolean): WatchedPackagesHelper {
-		const watchedPackages: ReadonlyDeep<ExpectedPackageDB>[] = []
-		for (const pkg of this.packages.values()) {
-			if (func(pkg)) watchedPackages.push(pkg)
+	filter(
+		_context: JobContext,
+		func: (pkg: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>) => boolean
+	): WatchedPackagesHelper {
+		const watchedPackages: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>[] = []
+		for (const packages of this.packages.values()) {
+			for (const pkg of packages) {
+				if (func(pkg)) watchedPackages.push(pkg)
+			}
 		}

-		const newPackageIds = new Set(watchedPackages.map((p) => p._id))
+		const newPackageIds = new Set(watchedPackages.map((p) => p.packageId))
 		const watchedPackageInfos = this.packageInfos.filter((info) => newPackageIds.has(info.packageId))

 		return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos)
 	}

-	getPackage(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageDB> | undefined {
-		return this.packages.get(packageId)
+	hasPackage(packageId: ExpectedPackageId): boolean {
+		return this.packages.has(packageId)
 	}

-	getPackageInfo(packageId: string): Readonly<Array<PackageInfo.Any>> {
-		for (const pkg of this.packages.values()) {
-			if (pkg.blueprintPackageId === packageId) {
-				const info = this.packageInfos.filter((p) => p.packageId === pkg._id)
-				return unprotectObjectArray(info)
+	getPackageInfo(blueprintPackageId: string): Readonly<Array<PackageInfo.Any>> {
+		// Perhaps this should do some scoped source checks, but this should not be necessary.
+		// The caller should be ensuring that this helper has been filtered to only contain relevant packages
+		for (const packages of this.packages.values()) {
+			for (const pkg of packages) {
+				// Note: This finds the first package with the same blueprintPackageId. There could be multiple if the blueprints don't respect the uniqueness rules.
+ if (pkg.source.blueprintPackageId === blueprintPackageId) { + const info = this.packageInfos.filter((p) => p.packageId === pkg.packageId) + return unprotectObjectArray(info) + } } } diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 5c17bb1a3c..e3d6075fea 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -42,7 +42,7 @@ import { interpollateTranslation, wrapTranslatableMessageFromBlueprints, } from '@sofie-automation/corelib/dist/TranslatableMessage' -import { setDefaultIdOnExpectedPackages } from '../ingest/expectedPackages.js' +import { sanitiseExpectedPackages } from '../ingest/expectedPackages.js' import { logger } from '../logging.js' import { validateTimeline } from 'superfly-timeline' import { ReadonlyDeep } from 'type-fest' @@ -137,8 +137,8 @@ export function postProcessPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -267,8 +267,8 @@ export function postProcessAdLibPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -304,8 +304,8 @@ export function postProcessGlobalAdLibActions( `${rundownId}_${blueprintId}_global_adlib_action_${action.externalId}` ) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return literal({ ...action, @@ -345,8 +345,8 @@ export function postProcessAdLibActions( `${rundownId}_${blueprintId}_${partId}_adlib_action_${action.externalId}` ) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return literal({ ...action, @@ -428,8 +428,8 @@ export function postProcessGlobalPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -504,8 +504,8 @@ export function postProcessBucketAdLib( name: name || itemOrig.name, timelineObjectsString: EmptyPieceTimelineObjectsBlob, } - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) const timelineObjects = postProcessTimelineObjects(piece._id, blueprintId, itemOrig.content.timelineObjects) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) @@ -553,8 +553,8 @@ export function postProcessBucketAction( ...processAdLibActionITranslatableMessages(itemOrig, blueprintId, rank, label), } - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of 
expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return action } diff --git a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts index 8b5cb99a12..d85cbbcb7c 100644 --- a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts +++ b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts @@ -6,12 +6,12 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { defaultPart, defaultPiece, defaultAdLibPiece } from '../../__mocks__/defaultCollectionObjects.js' import { LAYER_IDS } from '../../__mocks__/presetCollections.js' import { ExpectedPackage, PieceLifespan, VTContent } from '@sofie-automation/blueprints-integration' -import { updateExpectedPackagesForPartModel } from '../expectedPackages.js' +import { updateExpectedMediaAndPlayoutItemsForPartModel } from '../expectedPackages.js' import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js' import { ReadonlyDeep } from 'type-fest' import { IngestPartModel } from '../model/IngestPartModel.js' -describe('Expected Media Items', () => { +describe('Expected Playout Items', () => { let context: MockJobContext beforeAll(async () => { context = setupDefaultJobEnvironment() @@ -111,9 +111,8 @@ describe('Expected Media Items', () => { return { part, pieces, adLibPieces } } - test('Generates ExpectedPackages for a Part', async () => { + test('Generates for a Part', async () => { const setExpectedPlayoutItems = jest.fn() - const setExpectedPackages = jest.fn() const { part, pieces, adLibPieces } = getMockPartContent() @@ -126,16 +125,12 @@ describe('Expected Media Items', () => { expectedPackages: [], setExpectedPlayoutItems, - setExpectedPackages, setInvalid: function (_invalid: boolean): void { throw new Error('Function not implemented.') }, } - updateExpectedPackagesForPartModel(context, partModel) - - expect(setExpectedPackages).toHaveBeenCalledTimes(1) - expect(setExpectedPackages.mock.calls[0][0]).toHaveLength(4) + updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel) expect(setExpectedPlayoutItems).toHaveBeenCalledTimes(1) expect(setExpectedPlayoutItems).toHaveBeenCalledWith([]) diff --git a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts index 65c5973dcc..8217bd46b0 100644 --- a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts +++ b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts @@ -10,14 +10,12 @@ import { } from '@sofie-automation/corelib/dist/worker/ingest' import { cleanUpExpectedPackagesForBucketAdLibs, - cleanUpExpectedPackagesForBucketAdLibsActions, updateExpectedPackagesForBucketAdLibPiece, updateExpectedPackagesForBucketAdLibAction, } from '../expectedPackages.js' import { omit } from '@sofie-automation/corelib/dist/lib' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { MongoQuery } from '../../db/index.js' export async function handleBucketRemoveAdlibPiece( @@ -34,7 +32,7 @@ export async function handleBucketRemoveAdlibPiece( await Promise.all([ context.directCollections.BucketAdLibPieces.remove({ _id: { $in: idsToUpdate } }), - cleanUpExpectedPackagesForBucketAdLibs(context, idsToUpdate), + 
cleanUpExpectedPackagesForBucketAdLibs(context, piece.bucketId, idsToUpdate), ]) } @@ -52,7 +50,7 @@ export async function handleBucketRemoveAdlibAction( await Promise.all([ context.directCollections.BucketAdLibActions.remove({ _id: { $in: idsToUpdate } }), - cleanUpExpectedPackagesForBucketAdLibsActions(context, idsToUpdate), + cleanUpExpectedPackagesForBucketAdLibs(context, action.bucketId, idsToUpdate), ]) } @@ -64,12 +62,6 @@ export async function handleBucketEmpty(context: JobContext, data: BucketEmptyPr context.directCollections.BucketAdLibActions.remove({ bucketId: id, studioId: context.studioId }), context.directCollections.ExpectedPackages.remove({ studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, - bucketId: id, - }), - context.directCollections.ExpectedPackages.remove({ - studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, bucketId: id, }), ]) diff --git a/packages/job-worker/src/ingest/bucket/import.ts b/packages/job-worker/src/ingest/bucket/import.ts index 569bd05e78..8d6ca518ca 100644 --- a/packages/job-worker/src/ingest/bucket/import.ts +++ b/packages/job-worker/src/ingest/bucket/import.ts @@ -12,7 +12,6 @@ import { getSystemVersion } from '../../lib/index.js' import { BucketItemImportProps, BucketItemRegenerateProps } from '@sofie-automation/corelib/dist/worker/ingest' import { cleanUpExpectedPackagesForBucketAdLibs, - cleanUpExpectedPackagesForBucketAdLibsActions, updateExpectedPackagesForBucketAdLibPiece, updateExpectedPackagesForBucketAdLibAction, } from '../expectedPackages.js' @@ -155,7 +154,13 @@ async function regenerateBucketItemFromIngestInfo( if (!showStyleCompound) throw new Error(`Unable to create a ShowStyleCompound for ${showStyleBase._id}, ${showStyleVariant._id} `) - const rawAdlib = await generateBucketAdlibForVariant(context, blueprint, showStyleCompound, ingestInfo.payload) + const rawAdlib = await generateBucketAdlibForVariant( + context, + blueprint, + showStyleCompound, + bucketId, + ingestInfo.payload + ) if (rawAdlib) { const importVersions: RundownImportVersions = { @@ -229,7 +234,7 @@ async function regenerateBucketItemFromIngestInfo( const adlibIdsToRemoveArray = Array.from(adlibIdsToRemove) ps.push( - cleanUpExpectedPackagesForBucketAdLibs(context, adlibIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibs(context, bucketId, adlibIdsToRemoveArray), context.directCollections.BucketAdLibPieces.remove({ _id: { $in: adlibIdsToRemoveArray } }) ) } @@ -237,7 +242,7 @@ async function regenerateBucketItemFromIngestInfo( const actionIdsToRemoveArray = Array.from(actionIdsToRemove) ps.push( - cleanUpExpectedPackagesForBucketAdLibsActions(context, actionIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibs(context, bucketId, actionIdsToRemoveArray), context.directCollections.BucketAdLibActions.remove({ _id: { $in: actionIdsToRemoveArray } }) ) } @@ -248,17 +253,18 @@ async function generateBucketAdlibForVariant( context: JobContext, blueprint: ReadonlyDeep, showStyleCompound: ReadonlyDeep, + bucketId: BucketId, // pieceId: BucketAdLibId | BucketAdLibActionId, payload: IngestAdlib ): Promise { if (!blueprint.blueprint.getAdlibItem) return null - const watchedPackages = await WatchedPackagesHelper.create(context, { - // We don't know what the `pieceId` will be, but we do know the `externalId` - pieceExternalId: payload.externalId, + const watchedPackages = await WatchedPackagesHelper.create(context, null, bucketId, { fromPieceType: { $in: [ExpectedPackageDBType.BUCKET_ADLIB, 
ExpectedPackageDBType.BUCKET_ADLIB_ACTION], }, + // We don't know what the `pieceId` will be, but we do know the `externalId` + pieceExternalId: payload.externalId, }) const contextForVariant = new ShowStyleUserContext( diff --git a/packages/job-worker/src/ingest/commit.ts b/packages/job-worker/src/ingest/commit.ts index c017861d36..47e26f850c 100644 --- a/packages/job-worker/src/ingest/commit.ts +++ b/packages/job-worker/src/ingest/commit.ts @@ -19,7 +19,7 @@ import { removeRundownFromDb, } from '../rundownPlaylists.js' import { ReadonlyDeep } from 'type-fest' -import { IngestModel, IngestModelReadonly } from './model/IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js' import { JobContext } from '../jobs/index.js' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' @@ -40,7 +40,6 @@ import { PlayoutRundownModelImpl } from '../playout/model/implementation/Playout import { PlayoutSegmentModelImpl } from '../playout/model/implementation/PlayoutSegmentModelImpl.js' import { createPlayoutModelFromIngestModel } from '../playout/model/implementation/LoadPlayoutModel.js' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { DatabasePersistedModel } from '../modelBase.js' import { updateSegmentIdsForAdlibbedPartInstances } from './commit/updateSegmentIdsForAdlibbedPartInstances.js' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { AnyBulkWriteOperation } from 'mongodb' @@ -64,7 +63,7 @@ interface PlaylistIdPair { */ export async function CommitIngestOperation( context: JobContext, - ingestModel: IngestModel & DatabasePersistedModel, + ingestModel: IngestModel & IngestDatabasePersistedModel, beforeRundown: ReadonlyDeep | undefined, beforePartMap: BeforeIngestOperationPartMap, data: ReadonlyDeep @@ -223,7 +222,7 @@ export async function CommitIngestOperation( ) // Start the save - const pSaveIngest = ingestModel.saveAllToDatabase() + const pSaveIngest = ingestModel.saveAllToDatabase(playlistLock) pSaveIngest.catch(() => null) // Ensure promise isn't reported as unhandled await validateAdlibTestingSegment(context, playoutModel) diff --git a/packages/job-worker/src/ingest/expectedPackages.ts b/packages/job-worker/src/ingest/expectedPackages.ts index b49d9e993e..4a2479d23f 100644 --- a/packages/job-worker/src/ingest/expectedPackages.ts +++ b/packages/job-worker/src/ingest/expectedPackages.ts @@ -1,36 +1,14 @@ -import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' -import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { ExpectedPackageDBType, - ExpectedPackageDBFromPiece, - ExpectedPackageDBFromBaselineAdLibPiece, - ExpectedPackageDBFromAdLibAction, - ExpectedPackageDBFromBaselineAdLibAction, - ExpectedPackageDBFromBucketAdLib, - ExpectedPackageDBFromBucketAdLibAction, - ExpectedPackageDBBase, - ExpectedPackageDBFromRundownBaselineObjects, - ExpectedPackageDBFromStudioBaselineObjects, - getContentVersionHash, + ExpectedPackageDB, + ExpectedPackageIngestSource, getExpectedPackageId, - ExpectedPackageFromRundown, + ExpectedPackageIngestSourceBucketAdlibAction, + 
ExpectedPackageIngestSourceBucketAdlibPiece, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { - SegmentId, - RundownId, - AdLibActionId, - PieceId, - RundownBaselineAdLibActionId, - BucketAdLibActionId, - BucketAdLibId, - StudioId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' -import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' -import { saveIntoDb } from '../db/changes.js' +import { BucketId, BucketAdLibId, BucketAdLibActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PlayoutModel } from '../playout/model/PlayoutModel.js' import { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel.js' import { ReadonlyDeep } from 'type-fest' @@ -41,336 +19,247 @@ import { updateExpectedPlayoutItemsForRundownBaseline, } from './expectedPlayoutItems.js' import { JobContext, JobStudio } from '../jobs/index.js' -import { ExpectedPackageForIngestModelBaseline, IngestModel } from './model/IngestModel.js' +import { IngestModel } from './model/IngestModel.js' import { IngestPartModel } from './model/IngestPartModel.js' -import { clone } from '@sofie-automation/corelib/dist/lib' +import { hashObj } from '@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' -export function updateExpectedPackagesForPartModel(context: JobContext, part: IngestPartModel): void { +export function updateExpectedMediaAndPlayoutItemsForPartModel(context: JobContext, part: IngestPartModel): void { updateExpectedPlayoutItemsForPartModel(context, part) - - const expectedPackages: ExpectedPackageFromRundown[] = [ - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.pieces, - ExpectedPackageDBType.PIECE - ), - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibPieces, - ExpectedPackageDBType.ADLIB_PIECE - ), - ...generateExpectedPackagesForAdlibAction( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibActions - ), - ] - - part.setExpectedPackages(expectedPackages) } -export async function updateExpectedPackagesForRundownBaseline( +export async function updateExpectedMediaAndPlayoutItemsForRundownBaseline( context: JobContext, ingestModel: IngestModel, - baseline: BlueprintResultBaseline | undefined, - forceBaseline = false + baseline: BlueprintResultBaseline | undefined ): Promise { await updateExpectedPlayoutItemsForRundownBaseline(context, ingestModel, baseline) +} - const expectedPackages: ExpectedPackageForIngestModelBaseline[] = [] - - const preserveTypesDuringSave = new Set() +function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlib: BucketAdLib) { + const packages: ExpectedPackageDB[] = [] - // Only regenerate the baseline types if they are already loaded into memory - // If the data isn't already loaded, then we haven't made any changes to the baseline adlibs - // This means we can skip regenerating them as it is guaranteed there will be no changes - const baselineAdlibPieceCache = forceBaseline - ? 
await ingestModel.rundownBaselineAdLibPieces.get() - : ingestModel.rundownBaselineAdLibPieces.getIfLoaded() - if (baselineAdlibPieceCache) { - expectedPackages.push( - ...generateExpectedPackagesForBaselineAdlibPiece( - context.studio, - ingestModel.rundownId, - baselineAdlibPieceCache - ) - ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_PIECE) - } - const baselineAdlibActionCache = forceBaseline - ? await ingestModel.rundownBaselineAdLibActions.get() - : ingestModel.rundownBaselineAdLibActions.getIfLoaded() - if (baselineAdlibActionCache) { - expectedPackages.push( - ...generateExpectedPackagesForBaselineAdlibAction( - context.studio, - ingestModel.rundownId, - baselineAdlibActionCache + if (adlib.expectedPackages) { + packages.push( + ...generateBucketExpectedPackages( + studio, + adlib.bucketId, + { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, + pieceId: adlib._id, + pieceExternalId: adlib.externalId, + }, + adlib.expectedPackages ) ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_ACTION) - } - - if (baseline) { - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(baseline.expectedPackages) - - const bases = generateExpectedPackageBases( - context.studio, - ingestModel.rundownId, - baseline.expectedPackages ?? [] - ) - - expectedPackages.push( - ...bases.map((item): ExpectedPackageDBFromRundownBaselineObjects => { - return { - ...item, - fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS, - rundownId: ingestModel.rundownId, - pieceId: null, - } - }) - ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS) - } - - // Add expected packages for global pieces - for (const piece of ingestModel.getGlobalPieces()) { - if (piece.expectedPackages) { - const bases = generateExpectedPackageBases(context.studio, piece._id, piece.expectedPackages) - for (const base of bases) { - expectedPackages.push({ - ...base, - rundownId: ingestModel.rundownId, - pieceId: piece._id, - fromPieceType: ExpectedPackageDBType.BASELINE_PIECE, - }) - } - } } - // Preserve anything existing - for (const expectedPackage of ingestModel.expectedPackagesForRundownBaseline) { - if (preserveTypesDuringSave.has(expectedPackage.fromPieceType)) { - expectedPackages.push(clone(expectedPackage)) - } - } - - ingestModel.setExpectedPackagesForRundownBaseline(expectedPackages) -} - -function generateExpectedPackagesForPiece( - studio: ReadonlyDeep, - rundownId: RundownId, - segmentId: SegmentId, - pieces: ReadonlyDeep[], - type: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE -) { - const packages: ExpectedPackageDBFromPiece[] = [] - for (const piece of pieces) { - const partId = 'startPartId' in piece ? 
piece.startPartId : piece.partId - if (piece.expectedPackages && partId) { - const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - segmentId, - partId, - pieceId: piece._id, - fromPieceType: type, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBaselineAdlibPiece( - studio: ReadonlyDeep, - rundownId: RundownId, - pieces: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromBaselineAdLibPiece[] = [] - for (const piece of pieces) { - if (piece.expectedPackages) { - const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - pieceId: piece._id, - fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE, - }) - } - } - } - return packages -} -function generateExpectedPackagesForAdlibAction( - studio: ReadonlyDeep, - rundownId: RundownId, - segmentId: SegmentId, - actions: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromAdLibAction[] = [] - for (const action of actions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - segmentId, - partId: action.partId, - pieceId: action._id, - fromPieceType: ExpectedPackageDBType.ADLIB_ACTION, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBaselineAdlibAction( - studio: ReadonlyDeep, - rundownId: RundownId, - actions: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromBaselineAdLibAction[] = [] - for (const action of actions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - pieceId: action._id, - fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION, - }) - } - } - } return packages } -function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlibs: BucketAdLib[]) { - const packages: ExpectedPackageDBFromBucketAdLib[] = [] - for (const adlib of adlibs) { - if (adlib.expectedPackages) { - const bases = generateExpectedPackageBases(studio, adlib._id, adlib.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - bucketId: adlib.bucketId, - pieceId: adlib._id, - pieceExternalId: adlib.externalId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBucketAdlibAction( - studio: ReadonlyDeep, - adlibActions: BucketAdLibAction[] -) { - const packages: ExpectedPackageDBFromBucketAdLibAction[] = [] - for (const action of adlibActions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - bucketId: action.bucketId, +function generateExpectedPackagesForBucketAdlibAction(studio: ReadonlyDeep, action: BucketAdLibAction) { + const packages: ExpectedPackageDB[] = [] + + if (action.expectedPackages) { + packages.push( + ...generateBucketExpectedPackages( + studio, + action.bucketId, + { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, pieceId: action._id, pieceExternalId: action.externalId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, - }) - } - } + }, + action.expectedPackages + ) + ) } + return packages } -function 
generateExpectedPackageBases(
+function generateBucketExpectedPackages<TSource extends ExpectedPackageIngestSourceBucketAdlibPiece | ExpectedPackageIngestSourceBucketAdlibAction>(
 	studio: ReadonlyDeep<JobStudio>,
-	ownerId:
-		| PieceId
-		| AdLibActionId
-		| RundownBaselineAdLibActionId
-		| BucketAdLibId
-		| BucketAdLibActionId
-		| RundownId
-		| StudioId,
+	bucketId: BucketId,
+	source: Omit<TSource, 'blueprintPackageId' | 'listenToPackageInfoUpdates'>,
 	expectedPackages: ReadonlyDeep<ExpectedPackage.Any[]>
-) {
-	const bases: Omit<ExpectedPackageDBBase, 'pieceId' | 'fromPieceType'>[] = []
+): ExpectedPackageDB[] {
+	const bases: ExpectedPackageDB[] = []

 	for (let i = 0; i < expectedPackages.length; i++) {
 		const expectedPackage = expectedPackages[i]
-		const id = expectedPackage._id || '__unnamed' + i
+
+		const fullPackage: ReadonlyDeep<ExpectedPackage.Any> = {
+			...expectedPackage,
+			_id: expectedPackage._id || '__unnamed' + i,
+		}

 		bases.push({
-			...clone(expectedPackage),
-			_id: getExpectedPackageId(ownerId, id),
-			blueprintPackageId: id,
-			contentVersionHash: getContentVersionHash(expectedPackage),
+			_id: getExpectedPackageId(bucketId, fullPackage),
+			package: fullPackage,
 			studioId: studio._id,
-			created: Date.now(),
+			rundownId: null,
+			bucketId: bucketId,
+			created: Date.now(), // This will be preserved during the save if needed
+			ingestSources: [
+				{
+					...(source as any), // Because this is a generic, this spread doesn't work
+					blueprintPackageId: expectedPackage._id,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				},
+			],
+			playoutSources: {
+				// These don't belong to a rundown, so can't be referenced by playout
+				pieceInstanceIds: [],
+			},
 		})
 	}

 	return bases
 }

+async function writeUpdatedExpectedPackages(
+	context: JobContext,
+	bucketId: BucketId,
+	documentsToSave: ExpectedPackageDB[],
+	matchSource: Partial<ExpectedPackageIngestSource>
+): Promise<void> {
+	const writeOps: AnyBulkWriteOperation<ExpectedPackageDB>[] = []
+
+	const documentIdsToSave = documentsToSave.map((doc) => doc._id)
+
+	// Find which documents already exist in the database
+	// It would be nice to avoid this, but that would make the update operation incredibly complex
+	// There is no risk of race conditions, as bucket packages are only modified in the ingest job worker
+	const existingDocIds = new Set(
+		(
+			await context.directCollections.ExpectedPackages.findFetch(
+				{
+					_id: { $in: documentIdsToSave },
+					studioId: context.studioId,
+					bucketId: bucketId,
+				},
+				{
+					projection: {
+						_id: 1,
+					},
+				}
+			)
+		).map((doc) => doc._id)
+	)
+
+	for (const doc of documentsToSave) {
+		if (existingDocIds.has(doc._id)) {
+			// Document already exists, perform an update to merge the source into the existing document
+			writeOps.push({
+				updateOne: {
+					filter: {
+						_id: doc._id,
+						ingestSources: {
+							// This is pretty messy, but we need to make sure that we don't add the same source twice
+							$not: {
+								$elemMatch: matchSource,
+							},
+						},
+					},
+					update: {
+						$addToSet: {
+							ingestSources: doc.ingestSources[0],
+						},
+					},
+				},
+			})
+		} else {
+			// Perform a simple insert
+			writeOps.push({
+				insertOne: {
+					document: doc,
+				},
+			})
+		}
+	}
+
+	// Remove any old references from this source
+	writeOps.push({
+		updateMany: {
+			filter: {
+				studioId: context.studioId,
+				bucketId: bucketId,
+				_id: { $nin: documentIdsToSave },
+			},
+			update: {
+				$pull: {
+					ingestSources: matchSource,
+				},
+			},
+		},
+	})
+
+	await context.directCollections.ExpectedPackages.bulkWrite(writeOps)
+
+	// Check for any packages that no longer have any sources
+	await cleanUpUnusedPackagesInBucket(context, bucketId)
+}
+
 export async function updateExpectedPackagesForBucketAdLibPiece(
 	context: JobContext,
 	adlib: BucketAdLib
 ): Promise<void> {
-	const packages = generateExpectedPackagesForBucketAdlib(context.studio, [adlib])
+	const documentsToSave = generateExpectedPackagesForBucketAdlib(context.studio, adlib)

-	await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: adlib._id }, packages)
+	await writeUpdatedExpectedPackages(context, adlib.bucketId, documentsToSave, {
+		fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB,
+		pieceId: adlib._id,
+	})
 }

 export async function updateExpectedPackagesForBucketAdLibAction(
 	context: JobContext,
 	action: BucketAdLibAction
 ): Promise<void> {
-	const packages = generateExpectedPackagesForBucketAdlibAction(context.studio, [action])
+	const documentsToSave = generateExpectedPackagesForBucketAdlibAction(context.studio, action)

-	await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: action._id }, packages)
+	await writeUpdatedExpectedPackages(context, action.bucketId, documentsToSave, {
+		fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
+		pieceId: action._id,
+	})
 }
+
 export async function cleanUpExpectedPackagesForBucketAdLibs(
 	context: JobContext,
-	adLibIds: BucketAdLibId[]
+	bucketId: BucketId,
+	adLibIds: Array<BucketAdLibId | BucketAdLibActionId>
 ): Promise<void> {
 	if (adLibIds.length > 0) {
-		await context.directCollections.ExpectedPackages.remove({
-			pieceId: {
-				$in: adLibIds,
+		// Remove the claim for the adlibs from any expected packages in the db
+		await context.directCollections.ExpectedPackages.update(
+			{
+				studioId: context.studioId,
+				bucketId: bucketId,
+				// Note: this could have the ingestSources match, but that feels excessive as the $pull performs the same check
 			},
-		})
+			{
+				$pull: {
+					ingestSources: {
+						fromPieceType: {
+							$in: [ExpectedPackageDBType.BUCKET_ADLIB, ExpectedPackageDBType.BUCKET_ADLIB_ACTION],
+						},
+						pieceId: { $in: adLibIds },
+					} as any, // This cast isn't nice, but is needed for some reason
+				},
+			}
+		)
+
+		// Remove any expected packages that now have no owners
+		await cleanUpUnusedPackagesInBucket(context, bucketId)
 	}
 }
-export async function cleanUpExpectedPackagesForBucketAdLibsActions(
-	context: JobContext,
-	adLibIds: BucketAdLibActionId[]
-): Promise<void> {
-	if (adLibIds.length > 0) {
-		await context.directCollections.ExpectedPackages.remove({
-			pieceId: {
-				$in: adLibIds,
-			},
-		})
-	}
+
+async function cleanUpUnusedPackagesInBucket(context: JobContext, bucketId: BucketId) {
+	await context.directCollections.ExpectedPackages.remove({
+		studioId: context.studioId,
+		bucketId: bucketId,
+		ingestSources: { $size: 0 },
+		// Future: these currently can't be referenced by playoutSources, but they could be in the future
+	})
 }

 export function updateBaselineExpectedPackagesOnStudio(
@@ -380,29 +269,21 @@ export function updateBaselineExpectedPackagesOnStudio(
 ): void {
 	updateBaselineExpectedPlayoutItemsOnStudio(context, playoutModel, baseline.expectedPlayoutItems ?? [])

-	// Fill in ids of unnamed expectedPackages
-	setDefaultIdOnExpectedPackages(baseline.expectedPackages)
-
-	const bases = generateExpectedPackageBases(context.studio, context.studio._id, baseline.expectedPackages ?? [])
-	playoutModel.setExpectedPackagesForStudioBaseline(
-		bases.map((item): ExpectedPackageDBFromStudioBaselineObjects => {
-			return {
-				...item,
-				fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS,
-				pieceId: null,
-			}
-		})
-	)
+	playoutModel.setExpectedPackagesForStudioBaseline(baseline.expectedPackages ?? [])
 }

-export function setDefaultIdOnExpectedPackages(expectedPackages: ExpectedPackage.Any[] | undefined): void {
-	// Fill in ids of unnamed expectedPackage
+export function sanitiseExpectedPackages(expectedPackages: ExpectedPackage.Any[] | undefined): void {
 	if (expectedPackages) {
-		for (let i = 0; i < expectedPackages.length; i++) {
-			const expectedPackage = expectedPackages[i]
-			if (!expectedPackage._id) {
-				expectedPackage._id = `__index${i}`
-			}
+		for (const expectedPackage of expectedPackages) {
+			expectedPackage.contentVersionHash = getContentVersionHash(expectedPackage)
 		}
 	}
 }
+
+function getContentVersionHash(expectedPackage: ReadonlyDeep<Omit<ExpectedPackage.Any, '_id'>>): string {
+	return hashObj({
+		content: expectedPackage.content,
+		version: expectedPackage.version,
+		// todo: should expectedPackage.sources.containerId be here as well?
+	})
+}
diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts
index 17b0bd4931..113f6dfb42 100644
--- a/packages/job-worker/src/ingest/generationRundown.ts
+++ b/packages/job-worker/src/ingest/generationRundown.ts
@@ -1,7 +1,14 @@
-import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	ExpectedPackageDBType,
+	ExpectedPackageIngestSourceBaselineAdlibAction,
+	ExpectedPackageIngestSourceBaselineAdlibPiece,
+	ExpectedPackageIngestSourceBaselineObjects,
+	ExpectedPackageIngestSourceBaselinePiece,
+	ExpectedPackageIngestSourceRundownBaseline,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { BlueprintId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes'
-import { serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
+import { Piece, serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { DBRundown, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown'
 import { literal } from '@sofie-automation/corelib/dist/lib'
 import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
@@ -21,13 +28,20 @@ import { extendIngestRundownCore, canRundownBeUpdated } from './lib.js'
 import { JobContext } from '../jobs/index.js'
 import { CommitIngestData } from './lock.js'
 import { SelectedShowStyleVariant, selectShowStyleVariant } from './selectShowStyleVariant.js'
-import { updateExpectedPackagesForRundownBaseline } from './expectedPackages.js'
+import { updateExpectedMediaAndPlayoutItemsForRundownBaseline } from './expectedPackages.js'
 import { ReadonlyDeep } from 'type-fest'
-import { BlueprintResultRundown, ExtendedIngestRundown } from '@sofie-automation/blueprints-integration'
+import {
+	BlueprintResultRundown,
+	ExpectedPackage,
+	ExtendedIngestRundown,
+} from '@sofie-automation/blueprints-integration'
 import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage'
 import { convertRundownToBlueprintSegmentRundown, translateUserEditsFromBlueprint } from '../blueprints/context/lib.js'
 import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment.js'
 import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
+import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction'
+import { ExpectedPackageCollector, IngestExpectedPackage } from './model/IngestExpectedPackage.js'

 export enum GenerateRundownMode {
 	Create = 'create',
@@ -207,8 +221,8 @@
 	const rundownBaselinePackages = allRundownWatchedPackages.filter(
 		context,
 		(pkg) =>
-			pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
-			pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
+			pkg.source.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
+			pkg.source.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
 	)

 	const blueprintContext = new GetRundownContext(
@@ -321,9 +335,59 @@
 		dbRundown._id
 	)

-	await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces)
+	const expectedPackages = generateExpectedPackagesForBaseline(
+		dbRundown._id,
+		adlibPieces,
+		adlibActions,
+		globalPieces,
+		rundownRes.baseline.expectedPackages ?? []
+	)
+
+	await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces, expectedPackages)

-	await updateExpectedPackagesForRundownBaseline(context, ingestModel, rundownRes.baseline)
+	await updateExpectedMediaAndPlayoutItemsForRundownBaseline(context, ingestModel, rundownRes.baseline)

 	return dbRundown
 }
+
+function generateExpectedPackagesForBaseline(
+	rundownId: RundownId,
+	adLibPieces: AdLibPiece[],
+	adLibActions: RundownBaselineAdLibAction[],
+	globalPieces: Piece[],
+	expectedPackages: ExpectedPackage.Any[]
+): IngestExpectedPackage<ExpectedPackageIngestSourceRundownBaseline>[] {
+	const collector = new ExpectedPackageCollector<ExpectedPackageIngestSourceRundownBaseline>(rundownId)
+
+	// This expects to generate multiple documents with the same packageId; these get deduplicated during saving.
+	// This should only concern itself with avoiding duplicates with the same source
+
+	collector.addPackagesWithSource<ExpectedPackageIngestSourceBaselineObjects>(expectedPackages, {
+		fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS,
+	})
+
+	// Populate the ingestSources
+	for (const piece of adLibPieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource<ExpectedPackageIngestSourceBaselineAdlibPiece>(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE,
+				pieceId: piece._id,
+			})
+	}
+	for (const piece of adLibActions) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource<ExpectedPackageIngestSourceBaselineAdlibAction>(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
+				pieceId: piece._id,
+			})
+	}
+	for (const piece of globalPieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource<ExpectedPackageIngestSourceBaselinePiece>(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_PIECE,
+				pieceId: piece._id,
+			})
+	}
+
+	return collector.finish()
+}
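(For orientation: a sketch of the merged document this feeds into, with the shape taken from the ExpectedPackageDB fields used elsewhere in this diff; the ids and values are placeholders. A package returned by both a baseline adlib piece and a baseline adlib action is collected twice above, one entry per source, and saved as a single document carrying both ingest sources:)

	const merged: ExpectedPackageDB = {
		_id: getExpectedPackageId(rundownId, expectedPackage),
		package: expectedPackage,
		studioId: studioId,
		rundownId: rundownId,
		bucketId: null,
		created: Date.now(),
		ingestSources: [
			{
				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE,
				pieceId: adLibPieceId,
				blueprintPackageId: expectedPackage._id,
				listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
			},
			{
				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
				pieceId: adLibActionId,
				blueprintPackageId: expectedPackage._id,
				listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
			},
		],
		playoutSources: { pieceInstanceIds: [] },
	}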
diff --git a/packages/job-worker/src/ingest/generationSegment.ts b/packages/job-worker/src/ingest/generationSegment.ts
index 5b679eebe8..e3583e8515 100644
--- a/packages/job-worker/src/ingest/generationSegment.ts
+++ b/packages/job-worker/src/ingest/generationSegment.ts
@@ -19,7 +19,7 @@ import {
 	SofieIngestSegment,
 } from '@sofie-automation/blueprints-integration'
 import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage'
-import { updateExpectedPackagesForPartModel } from './expectedPackages.js'
+import { updateExpectedMediaAndPlayoutItemsForPartModel } from './expectedPackages.js'
 import { IngestReplacePartType, IngestSegmentModel } from './model/IngestSegmentModel.js'
 import { ReadonlyDeep } from 'type-fest'
 import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
@@ -107,7 +107,7 @@ async function regenerateSegmentAndUpdateModelFull(
 	const segmentId = ingestModel.getSegmentIdFromExternalId(ingestSegment.externalId)
 	const segmentWatchedPackages = allRundownWatchedPackages.filter(
 		context,
-		(p) => 'segmentId' in p && p.segmentId === segmentId
+		(p) => 'segmentId' in p.source && p.source.segmentId === segmentId
 	)

 	let updatedSegmentModel = await regenerateSegmentAndUpdateModel(
@@ -191,11 +191,10 @@ async function checkIfSegmentReferencesUnloadedPackageInfos(
 	// check if there are any updates right away?
 	for (const part of segmentModel.parts) {
 		for (const expectedPackage of part.expectedPackages) {
-			if (expectedPackage.listenToPackageInfoUpdates) {
-				const loadedPackage = segmentWatchedPackages.getPackage(expectedPackage._id)
-				if (!loadedPackage) {
+			if (expectedPackage.source.listenToPackageInfoUpdates) {
+				if (!segmentWatchedPackages.hasPackage(expectedPackage.packageId)) {
 					// The package didn't exist prior to the blueprint running
-					expectedPackageIdsToCheck.add(expectedPackage._id)
+					expectedPackageIdsToCheck.add(expectedPackage.packageId)
 				}
 			}
 		}
@@ -411,7 +410,7 @@ function updateModelWithGeneratedPart(
 	)

 	const partModel = segmentModel.replacePart(part, processedPieces, adlibPieces, adlibActions)
-	updateExpectedPackagesForPartModel(context, partModel)
+	updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel)
 }

 /**
diff --git a/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts
new file mode 100644
index 0000000000..631039368f
--- /dev/null
+++ b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts
@@ -0,0 +1,66 @@
+import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
+import {
+	getExpectedPackageId,
+	type ExpectedPackageDBType,
+	type ExpectedPackageIngestSourcePart,
+	type ExpectedPackageIngestSourceRundownBaseline,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import type { BucketId, ExpectedPackageId, RundownId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import type { ReadonlyDeep } from 'type-fest'
+
+/**
+ * A simpler form of ExpectedPackageDB that is scoped to the properties relevant to ingest.
+ * This is limited to be owned by one source, during the save process the documents will be merged
+ */
+export interface IngestExpectedPackage<
+	TPackageSource extends { fromPieceType: ExpectedPackageDBType } =
+		| ExpectedPackageIngestSourcePart
+		| ExpectedPackageIngestSourceRundownBaseline,
+> {
+	packageId: ExpectedPackageId
+
+	package: ReadonlyDeep<ExpectedPackage.Any>
+
+	source: TPackageSource
+}
+
+export class ExpectedPackageCollector<TPackageSource extends { fromPieceType: ExpectedPackageDBType }> {
+	readonly #parentId: RundownId | StudioId | BucketId
+	readonly #packages: IngestExpectedPackage<TPackageSource>[] = []
+
+	constructor(parentId: RundownId | StudioId | BucketId) {
+		this.#parentId = parentId
+	}
+
+	addPackagesWithSource = <TSource extends TPackageSource = never>( // never to force the caller to specify the type
+		expectedPackages: ReadonlyDeep<ExpectedPackage.Any>[],
+		source: Omit<TSource, 'blueprintPackageId' | 'listenToPackageInfoUpdates'>
+	): void => {
+		const insertedPackagesForSource = new Set<string>()
+		for (const expectedPackage of expectedPackages) {
+			const id = getExpectedPackageId(this.#parentId, expectedPackage)
+
+			// Deduplicate with an id including the blueprintPackageId.
+			// This is to ensure the blueprints can reference the package with that id still
+			const uniqueId = `${id}-${expectedPackage._id}-${expectedPackage.listenToPackageInfoUpdates ?? false}`
+
+			// Ensure only inserted once for this source
+			if (insertedPackagesForSource.has(uniqueId)) continue
+			insertedPackagesForSource.add(uniqueId)
+
+			this.#packages.push({
+				packageId: id,
+				package: expectedPackage,
+				source: {
+					...(source as any), // Because this is a generic, this spread doesn't work
+					blueprintPackageId: expectedPackage._id,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				},
+			})
+		}
+	}
+
+	finish(): IngestExpectedPackage<TPackageSource>[] {
+		return this.#packages
+	}
+}
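(To make the collector contract concrete: a minimal usage sketch following the baseline call sites above; the explicit type argument is what the `= never` default forces the caller to provide.)

	const collector = new ExpectedPackageCollector<ExpectedPackageIngestSourceRundownBaseline>(rundownId)
	collector.addPackagesWithSource<ExpectedPackageIngestSourceBaselinePiece>(piece.expectedPackages ?? [], {
		fromPieceType: ExpectedPackageDBType.BASELINE_PIECE,
		pieceId: piece._id,
	})
	const packages = collector.finish() // one IngestExpectedPackage per unique (package, source) pair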
diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts
index 942095330d..946237b857 100644
--- a/packages/job-worker/src/ingest/model/IngestModel.ts
+++ b/packages/job-worker/src/ingest/model/IngestModel.ts
@@ -1,10 +1,4 @@
-import {
-	ExpectedPackageDBFromBaselineAdLibAction,
-	ExpectedPackageDBFromBaselineAdLibPiece,
-	ExpectedPackageDBFromBaselinePiece,
-	ExpectedPackageDBFromRundownBaselineObjects,
-	ExpectedPackageFromRundown,
-} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import {
 	ExpectedPackageId,
@@ -19,7 +13,7 @@ import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataMo
 import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction'
 import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece'
 import { LazyInitialiseReadonly } from '../../lib/lazy.js'
-import { RundownLock } from '../../jobs/lock.js'
+import type { PlaylistLock, RundownLock } from '../../jobs/lock.js'
 import { IngestSegmentModel, IngestSegmentModelReadonly } from './IngestSegmentModel.js'
 import { IngestPartModel, IngestPartModelReadonly } from './IngestPartModel.js'
 import { ReadonlyDeep } from 'type-fest'
@@ -32,13 +26,7 @@ import { ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs/sh
 import { WrappedShowStyleBlueprint } from '../../blueprints/cache.js'
 import { IBlueprintRundown } from '@sofie-automation/blueprints-integration'
 import type { INotificationsModel } from '../../notifications/NotificationsModel.js'
-
-export type ExpectedPackageForIngestModelBaseline =
-	| ExpectedPackageDBFromBaselineAdLibAction
-	| ExpectedPackageDBFromBaselineAdLibPiece
-	| ExpectedPackageDBFromRundownBaselineObjects
-	| ExpectedPackageDBFromBaselinePiece
-export type ExpectedPackageForIngestModel = ExpectedPackageFromRundown | ExpectedPackageForIngestModelBaseline
+import type { IngestExpectedPackage } from './IngestExpectedPackage.js'

 export interface IngestModelReadonly {
 	/**
@@ -62,7 +50,7 @@ export interface IngestModelReadonly {
 	/**
 	 * The ExpectedPackages for the baseline of this Rundown
 	 */
-	readonly expectedPackagesForRundownBaseline: ReadonlyDeep<ExpectedPackageForIngestModelBaseline>[]
+	readonly expectedPackagesForRundownBaseline: ReadonlyDeep<IngestExpectedPackage>[]

 	/**
 	 * The baseline Timeline objects of this Rundown
@@ -147,7 +135,7 @@ export interface IngestModelReadonly {
 	 * Search for an ExpectedPackage through the whole Rundown
 	 * @param id Id of the ExpectedPackage
 	 */
-	findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageForIngestModel> | undefined
+	findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageIngestSource>[]
 }

 export interface IngestModel extends IngestModelReadonly, BaseModel, INotificationsModel {
@@ -209,12 +197,6 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 	 */
 	setExpectedPlayoutItemsForRundownBaseline(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void

-	/**
-	 * Set the ExpectedPackages for the baseline of this Rundown
-	 * @param expectedPackages The new ExpectedPackages
-	 */
-	setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void
-
 	/**
 	 * Set the data for this Rundown.
 	 * This will either update or create the Rundown
@@ -246,7 +228,8 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 		timelineObjectsBlob: PieceTimelineObjectsBlob,
 		adlibPieces: RundownBaselineAdLibItem[],
 		adlibActions: RundownBaselineAdLibAction[],
-		pieces: Piece[]
+		pieces: Piece[],
+		expectedPackages: IngestExpectedPackage[]
 	): Promise<void>

 	/**
@@ -271,3 +254,10 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 }

 export type IngestReplaceSegmentType = Omit
+
+export interface IngestDatabasePersistedModel {
+	/**
+	 * Issue a save of the contents of this model to the database
+	 */
+	saveAllToDatabase(lock: PlaylistLock): Promise<void>
+}
diff --git a/packages/job-worker/src/ingest/model/IngestPartModel.ts b/packages/job-worker/src/ingest/model/IngestPartModel.ts
index e047d08a94..65214e73b0 100644
--- a/packages/job-worker/src/ingest/model/IngestPartModel.ts
+++ b/packages/job-worker/src/ingest/model/IngestPartModel.ts
@@ -2,9 +2,9 @@ import { ReadonlyDeep } from 'type-fest'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
-import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
+import { IngestExpectedPackage } from './IngestExpectedPackage.js'

 export interface IngestPartModelReadonly {
 	/**
@@ -32,7 +32,7 @@ export interface IngestPartModelReadonly {
 	/**
 	 * The ExpectedPackages belonging to this Part
 	 */
-	readonly expectedPackages: ReadonlyDeep<ExpectedPackageFromRundown>[]
+	readonly expectedPackages: ReadonlyDeep<IngestExpectedPackage>[]
 }
 /**
  * Wrap a Part and its contents in a view for Ingest operations
@@ -49,10 +49,4 @@ export interface IngestPartModel extends IngestPartModelReadonly {
 	 * @param expectedPlayoutItems The new ExpectedPlayoutItems
 	 */
 	setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void
-
-	/**
-	 * Set the ExpectedPackages for the contents of this Part
-	 * @param expectedPackages The new ExpectedPackages
-	 */
-	setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void
 }
diff --git a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
index c3396f04be..a45e3e19c4 100644
--- a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
+++ b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
@@ -98,6 +98,10 @@ export class DocumentChangeTracker<TDoc extends { _id: ProtectedString<any> }> {
 		return Array.from(this.#deletedIds.values())
 	}

+	getDocumentsToSave(): ReadonlyMap<TDoc['_id'], TDoc> {
+		return this.#documentsToSave
+	}
+
 	/**
 	 * Generate the mongodb BulkWrite operations for the documents known to this tracker
 	 * @returns mongodb BulkWrite operations
diff --git 
a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts index 9d79e27b74..b7508f80b3 100644 --- a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts +++ b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts @@ -1,71 +1,49 @@ -import { ExpectedPackageDBBase } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { - ExpectedPackageId, - ExpectedPlayoutItemId, - PartId, - RundownId, - SegmentId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' +import { ExpectedPlayoutItemId, PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { diffAndReturnLatestObjects, DocumentChanges, getDocumentChanges, setValuesAndTrackChanges } from './utils.js' +import type { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -function mutateExpectedPackage( - oldObj: ExpectedPackageType, - newObj: ExpectedPackageType -): ExpectedPackageType { - return { - ...newObj, - // Retain the created property - created: oldObj.created, - } -} - -export class ExpectedPackagesStore { +export class ExpectedPackagesStore { #expectedPlayoutItems: ExpectedPlayoutItemRundown[] - #expectedPackages: ExpectedPackageType[] + #expectedPackages: IngestExpectedPackage[] #expectedPlayoutItemsWithChanges = new Set() - #expectedPackagesWithChanges = new Set() + #expectedPackagesHasChanges = false get expectedPlayoutItems(): ReadonlyDeep { return this.#expectedPlayoutItems } - get expectedPackages(): ReadonlyDeep { - // Typescript is not happy with turning ExpectedPackageType into ReadonlyDeep because it can be a union - return this.#expectedPackages as any + get expectedPackages(): ReadonlyDeep[]> { + // Typescript is not happy because of the generic + return this.#expectedPackages as ReadonlyDeep>[] } get hasChanges(): boolean { - return this.#expectedPlayoutItemsWithChanges.size > 0 || this.#expectedPackagesWithChanges.size > 0 + return this.#expectedPlayoutItemsWithChanges.size > 0 || this.#expectedPackagesHasChanges } get expectedPlayoutItemsChanges(): DocumentChanges { return getDocumentChanges(this.#expectedPlayoutItemsWithChanges, this.#expectedPlayoutItems) } - get expectedPackagesChanges(): DocumentChanges { - return getDocumentChanges(this.#expectedPackagesWithChanges, this.#expectedPackages) - } clearChangedFlags(): void { this.#expectedPlayoutItemsWithChanges.clear() - this.#expectedPackagesWithChanges.clear() + this.#expectedPackagesHasChanges = false } #rundownId: RundownId - #segmentId: SegmentId | undefined #partId: PartId | undefined constructor( isBeingCreated: boolean, rundownId: RundownId, - segmentId: SegmentId | undefined, partId: PartId | undefined, expectedPlayoutItems: ExpectedPlayoutItemRundown[], - expectedPackages: ExpectedPackageType[] + expectedPackages: IngestExpectedPackage[] ) { this.#rundownId = rundownId - this.#segmentId = segmentId this.#partId = partId this.#expectedPlayoutItems = expectedPlayoutItems @@ -76,42 +54,38 @@ export class ExpectedPackagesStore boolean + ): void { this.#rundownId = rundownId - this.#segmentId = segmentId this.#partId = partId setValuesAndTrackChanges(this.#expectedPlayoutItemsWithChanges, this.#expectedPlayoutItems, { 
rundownId, partId, }) - setValuesAndTrackChanges(this.#expectedPackagesWithChanges, this.#expectedPackages, { - rundownId, - // @ts-expect-error Not all ExpectedPackage types have this property - segmentId, - partId, - }) + for (const expectedPackage of this.#expectedPackages) { + const mutatorChanged = updatePackageSource(expectedPackage.source) + + // The doc changed, track it as such + if (mutatorChanged) this.#expectedPackagesHasChanges = true + } } - compareToPreviousData(oldStore: ExpectedPackagesStore): void { + compareToPreviousData(oldStore: ExpectedPackagesStore): void { // Diff the objects, but don't update the stored copies diffAndReturnLatestObjects( this.#expectedPlayoutItemsWithChanges, oldStore.#expectedPlayoutItems, this.#expectedPlayoutItems ) - diffAndReturnLatestObjects( - this.#expectedPackagesWithChanges, - oldStore.#expectedPackages, - this.#expectedPackages, - mutateExpectedPackage - ) + this.#expectedPackagesHasChanges = true } setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void { @@ -127,19 +101,8 @@ export class ExpectedPackagesStore ({ - ...pkg, - partId: this.#partId, - segmentId: this.#segmentId, - rundownId: this.#rundownId, - })) - - this.#expectedPackages = diffAndReturnLatestObjects( - this.#expectedPackagesWithChanges, - this.#expectedPackages, - newExpectedPackages, - mutateExpectedPackage - ) + setExpectedPackages(expectedPackages: IngestExpectedPackage[]): void { + this.#expectedPackagesHasChanges = true + this.#expectedPackages = [...expectedPackages] } } diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index 41ad98f6a3..47b916a932 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -3,7 +3,9 @@ import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { ExpectedPackageDB, ExpectedPackageDBType, - ExpectedPackageFromRundown, + ExpectedPackageIngestSource, + ExpectedPackageIngestSourcePart, + ExpectedPackageIngestSourceRundownBaseline, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { @@ -27,11 +29,12 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { JobContext, ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../../jobs/index.js' import { LazyInitialise, LazyInitialiseReadonly } from '../../../lib/lazy.js' import { getRundownId, getSegmentId } from '../../lib.js' -import { RundownLock } from '../../../jobs/lock.js' +import { PlaylistLock, RundownLock } from '../../../jobs/lock.js' import { IngestSegmentModel } from '../IngestSegmentModel.js' import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js' import { IngestPartModel } from '../IngestPartModel.js' import { + assertNever, clone, Complete, deleteAllUndefinedProperties, @@ -40,15 +43,9 @@ import { literal, } from '@sofie-automation/corelib/dist/lib' import { IngestPartModelImpl } from './IngestPartModelImpl.js' -import { DatabasePersistedModel } from '../../../modelBase.js' import { ExpectedPackagesStore } from './ExpectedPackagesStore.js' import { ReadonlyDeep } from 'type-fest' -import { - ExpectedPackageForIngestModel, - ExpectedPackageForIngestModelBaseline, - IngestModel, - IngestReplaceSegmentType, -} from '../IngestModel.js' 
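(For reference when reading the hunks below: the `IngestExpectedPackage` type that this changeset imports lives in packages/job-worker/src/ingest/model/IngestExpectedPackage.ts, which is not part of this diff. Judging only by how it is consumed here — the `packageId`/`package`/`source` fields and the `ExpectedPackageCollector` helper — its shape is roughly the sketch below; the generic default and the ReadonlyDeep wrapping are assumptions, not the actual source.)

import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
import type { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids'
import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
import type { ReadonlyDeep } from 'type-fest'

export interface IngestExpectedPackage<
	TSource extends ExpectedPackageIngestSource = ExpectedPackageIngestSource
> {
	/** Id of the ExpectedPackageDB document this belongs to (derived via getExpectedPackageId) */
	packageId: ExpectedPackageId
	/** The blueprint-defined package description */
	package: ReadonlyDeep<ExpectedPackage.Base>
	/** The ingest document (piece/adlib/baseline object) that produced this package */
	source: TSource
}

(Because `packageId` is derived from the rundown and the blueprint package content via `getExpectedPackageId`, several sources can map onto one ExpectedPackageDB document — which is what the deduplication in `writeExpectedPackagesChangesForRundown` below relies on.)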
+import { IngestDatabasePersistedModel, IngestModel, IngestReplaceSegmentType } from '../IngestModel.js' import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { diffAndReturnLatestObjects } from './utils.js' import _ from 'underscore' @@ -61,6 +58,7 @@ import { generateWriteOpsForLazyDocuments } from './DocumentChangeTracker.js' import { IS_PRODUCTION } from '../../../environment.js' import { logger } from '../../../logging.js' import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js' +import { IngestExpectedPackage } from '../IngestExpectedPackage.js' export interface IngestModelImplExistingData { rundown: DBRundown @@ -84,7 +82,7 @@ interface SegmentWrapper { /** * Cache of relevant documents for an Ingest Operation */ -export class IngestModelImpl implements IngestModel, DatabasePersistedModel { +export class IngestModelImpl implements IngestModel, IngestDatabasePersistedModel { public readonly isIngest = true public readonly rundownLock: RundownLock @@ -117,7 +115,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { readonly #piecesWithChanges = new Set() #piecesImpl: ReadonlyArray - readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore + readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore get rundownBaselineTimelineObjects(): LazyInitialiseReadonly { // Return a simplified view of what we store, of just `timelineObjectsString` @@ -146,7 +144,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { get expectedPlayoutItemsForRundownBaseline(): ReadonlyDeep[] { return [...this.#rundownBaselineExpectedPackagesStore.expectedPlayoutItems] } - get expectedPackagesForRundownBaseline(): ReadonlyDeep[] { + get expectedPackagesForRundownBaseline(): ReadonlyDeep[] { return [...this.#rundownBaselineExpectedPackagesStore.expectedPackages] } @@ -172,25 +170,14 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { const groupedExpectedPlayoutItems = groupByToMap(existingData.expectedPlayoutItems, 'partId') - const rundownExpectedPackages = existingData.expectedPackages.filter( - (pkg): pkg is ExpectedPackageFromRundown => - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) - const groupedExpectedPackages = groupByToMap(rundownExpectedPackages, 'partId') - const baselineExpectedPackages = existingData.expectedPackages.filter( - (pkg): pkg is ExpectedPackageForIngestModelBaseline => - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + const { baselineExpectedPackages, groupedExpectedPackagesByPart } = groupExpectedPackages( + existingData.expectedPackages ) this.#rundownBaselineExpectedPackagesStore = new ExpectedPackagesStore( false, this.rundownId, undefined, - undefined, groupedExpectedPlayoutItems.get(undefined) ?? [], baselineExpectedPackages ) @@ -209,7 +196,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { groupedAdLibPieces.get(part._id) ?? [], groupedAdLibActions.get(part._id) ?? [], groupedExpectedPlayoutItems.get(part._id) ?? [], - groupedExpectedPackages.get(part._id) ?? [] + groupedExpectedPackagesByPart.get(part._id) ?? 
[] ) ) this.segmentsImpl.set(segment._id, { @@ -242,7 +229,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { true, this.rundownId, undefined, - undefined, [], [] ) @@ -352,18 +338,20 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return undefined } - findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep | undefined { - const baselinePackage = this.#rundownBaselineExpectedPackagesStore.expectedPackages.find( - (pkg) => pkg._id === packageId - ) - if (baselinePackage) return baselinePackage + findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep[] { + const sources: ReadonlyDeep[] = [] + + for (const baselinePackage of this.#rundownBaselineExpectedPackagesStore.expectedPackages) { + if (baselinePackage.packageId === packageId) sources.push(baselinePackage.source) + } for (const part of this.getAllOrderedParts()) { - const partPackage = part.expectedPackages.find((pkg) => pkg._id === packageId) - if (partPackage) return partPackage + for (const partPackage of part.expectedPackages) { + if (partPackage.packageId === packageId) sources.push(partPackage.source) + } } - return undefined + return sources } removeSegment(id: SegmentId): void { @@ -413,10 +401,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { setExpectedPlayoutItemsForRundownBaseline(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void { this.#rundownBaselineExpectedPackagesStore.setExpectedPlayoutItems(expectedPlayoutItems) } - setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void { - // Future: should these be here, or held as part of each adlib? - this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages) - } setRundownData( rundownData: IBlueprintRundown, @@ -473,7 +457,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { timelineObjectsBlob: PieceTimelineObjectsBlob, adlibPieces: RundownBaselineAdLibItem[], adlibActions: RundownBaselineAdLibAction[], - pieces: Piece[] + pieces: Piece[], + expectedPackages: IngestExpectedPackage[] ): Promise { const [loadedRundownBaselineObjs, loadedRundownBaselineAdLibPieces, loadedRundownBaselineAdLibActions] = await Promise.all([ @@ -536,6 +521,9 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { }) ) this.#piecesImpl = diffAndReturnLatestObjects(this.#piecesWithChanges, this.#piecesImpl, newPieces) + + // Future: should these be here, or held as part of each adlib? 
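+ // The `expectedPackages` argument is pre-collected by the caller. A sketch of what that
+ // collection likely looks like, reusing the ExpectedPackageCollector pattern shown for
+ // parts later in this diff (the exact baseline source fields are an assumption):
+ //   const collector = new ExpectedPackageCollector(this.rundownId)
+ //   for (const action of adlibActions) {
+ //     if (action.expectedPackages)
+ //       collector.addPackagesWithSource(action.expectedPackages, {
+ //         fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
+ //         pieceId: action._id,
+ //       })
+ //   }
+ //   const expectedPackages = collector.finish()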
+ this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages) } setRundownOrphaned(orphaned: RundownOrphanedReason | undefined): void { @@ -673,7 +661,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { this.#disposed = true } - async saveAllToDatabase(): Promise { + async saveAllToDatabase(playlistLock: PlaylistLock): Promise { if (this.#disposed) { throw new Error('Cannot save disposed IngestModel') } @@ -682,6 +670,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { throw new Error('Cannot save changes with released RundownLock') } + if (this.#rundownImpl && playlistLock.playlistId !== this.#rundownImpl.playlistId) { + throw new Error('Cannot save changes with incorrect PlaylistLock') + } + const span = this.context.startSpan('IngestModelImpl.saveAllToDatabase') // Ensure there are no duplicate part ids @@ -691,7 +683,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { partIds.add(part.part._id) } - const saveHelper = new SaveIngestModelHelper() + const saveHelper = new SaveIngestModelHelper(this.rundownId) for (const [segmentId, segment] of this.segmentsImpl.entries()) { saveHelper.addSegment(segment.segmentModel, segment.deleted) if (segment.deleted) { @@ -734,3 +726,50 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { span?.end() } } + +function groupExpectedPackages(expectedPackages: ExpectedPackageDB[]) { + const baselineExpectedPackages: IngestExpectedPackage[] = [] + const groupedExpectedPackagesByPart = new Map[]>() + + for (const expectedPackage of expectedPackages) { + for (const source of expectedPackage.ingestSources) { + switch (source.fromPieceType) { + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + baselineExpectedPackages.push({ + packageId: expectedPackage._id, + package: expectedPackage.package, + source: source, + }) + break + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: { + const partPackages = groupedExpectedPackagesByPart.get(source.partId) ?? 
[] + partPackages.push({ + packageId: expectedPackage._id, + package: expectedPackage.package, + source: source, + }) + groupedExpectedPackagesByPart.set(source.partId, partPackages) + break + } + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + // Ignore + break + default: + assertNever(source) + break + } + } + } + + return { + baselineExpectedPackages, + groupedExpectedPackagesByPart, + } +} diff --git a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts index b5b3c1e7c9..0e6c4e4b34 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts @@ -6,7 +6,6 @@ import { AdLibActionId, PieceId, RundownId, SegmentId } from '@sofie-automation/ import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { ExpectedPackagesStore } from './ExpectedPackagesStore.js' import { @@ -16,13 +15,15 @@ import { getDocumentChanges, setValuesAndTrackChanges, } from './utils.js' +import type { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { ExpectedPackageIngestSourcePart } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' export class IngestPartModelImpl implements IngestPartModel { readonly partImpl: DBPart readonly #pieces: Piece[] readonly #adLibPieces: AdLibPiece[] readonly #adLibActions: AdLibAction[] - readonly expectedPackagesStore: ExpectedPackagesStore + readonly expectedPackagesStore: ExpectedPackagesStore #setPartValue(key: T, newValue: DBPart[T]): void { if (newValue === undefined) { @@ -86,7 +87,7 @@ export class IngestPartModelImpl implements IngestPartModel { get expectedPlayoutItems(): ReadonlyDeep[] { return [...this.expectedPackagesStore.expectedPlayoutItems] } - get expectedPackages(): ReadonlyDeep[] { + get expectedPackages(): ReadonlyDeep[] { return [...this.expectedPackagesStore.expectedPackages] } @@ -135,7 +136,7 @@ export class IngestPartModelImpl implements IngestPartModel { adLibPieces: AdLibPiece[], adLibActions: AdLibAction[], expectedPlayoutItems: ExpectedPlayoutItemRundown[], - expectedPackages: ExpectedPackageFromRundown[] + expectedPackages: IngestExpectedPackage[] ) { this.partImpl = part this.#pieces = pieces @@ -159,7 +160,6 @@ export class IngestPartModelImpl implements IngestPartModel { this.expectedPackagesStore = new ExpectedPackagesStore( isBeingCreated, part.rundownId, - part.segmentId, part._id, expectedPlayoutItems, expectedPackages @@ -172,7 +172,7 @@ export class IngestPartModelImpl implements IngestPartModel { /** * This IngestPartModel replaces an existing one. 
- * Run some comparisons to ensure that + * Run some comparisons to ensure that the changed flags are set correctly * @param previousModel */ compareToPreviousModel(previousModel: IngestPartModelImpl): void { @@ -205,7 +205,14 @@ export class IngestPartModelImpl implements IngestPartModel { this.#compareAndSetPartValue('segmentId', segmentId) this.#compareAndSetPartValue('rundownId', rundownId) - this.expectedPackagesStore.setOwnerIds(rundownId, segmentId, this.part._id) + this.expectedPackagesStore.setOwnerIds(rundownId, this.part._id, (pkgSource) => { + if (pkgSource.partId !== this.part._id || pkgSource.segmentId !== segmentId) { + pkgSource.partId = this.part._id + pkgSource.segmentId = segmentId + return true + } + return false + }) setValuesAndTrackChanges(this.#piecesWithChanges, this.#pieces, { startRundownId: rundownId, @@ -225,8 +232,4 @@ export class IngestPartModelImpl implements IngestPartModel { setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void { this.expectedPackagesStore.setExpectedPlayoutItems(expectedPlayoutItems) } - setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void { - // Future: should these be here, or held as part of each adlib/piece? - this.expectedPackagesStore.setExpectedPackages(expectedPackages) - } } diff --git a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts index 5a965abf46..bcd29965d1 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts @@ -1,4 +1,4 @@ -import { PartId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { IngestReplacePartType, IngestSegmentModel } from '../IngestSegmentModel.js' @@ -12,6 +12,13 @@ import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { calculatePartExpectedDurationWithTransition } from '@sofie-automation/corelib/dist/playout/timings' import { clone } from '@sofie-automation/corelib/dist/lib' import { getPartId } from '../../lib.js' +import { + ExpectedPackageDBType, + ExpectedPackageIngestSourceAdlibAction, + ExpectedPackageIngestSourcePart, + ExpectedPackageIngestSourcePiece, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { ExpectedPackageCollector, type IngestExpectedPackage } from '../IngestExpectedPackage.js' /** * A light wrapper around the IngestPartModel, so that we can track the deletions while still accessing the contents @@ -207,7 +214,7 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { replacePart( rawPart: IngestReplacePartType, pieces: Piece[], - adLibPiece: AdLibPiece[], + adLibPieces: AdLibPiece[], adLibActions: AdLibAction[] ): IngestPartModel { const part: DBPart = { @@ -224,14 +231,23 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { const oldPart = this.partsImpl.get(part._id) + const expectedPackages = generateExpectedPackagesForPart( + part.rundownId, + part.segmentId, + part._id, + pieces, + adLibPieces, + adLibActions + ) + const partModel = new IngestPartModelImpl( !oldPart, clone(part), clone(pieces), - clone(adLibPiece), + clone(adLibPieces), clone(adLibActions), [], - [] 
+ expectedPackages ) partModel.setOwnerIds(this.segment.rundownId, this.segment._id) @@ -242,3 +258,48 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { return partModel } } + +function generateExpectedPackagesForPart( + rundownId: RundownId, + segmentId: SegmentId, + partId: PartId, + pieces: Piece[], + adLibPieces: AdLibPiece[], + adLibActions: AdLibAction[] +): IngestExpectedPackage[] { + const collector = new ExpectedPackageCollector(rundownId) + + // This expects to generate multiple documents with the same packageId, these get deduplicated during saving. + // This should only concern itself with avoiding duplicates with the same source + + // Populate the ingestSources + for (const piece of pieces) { + if (piece.expectedPackages) + collector.addPackagesWithSource(piece.expectedPackages, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + } + for (const piece of adLibPieces) { + if (piece.expectedPackages) + collector.addPackagesWithSource(piece.expectedPackages, { + fromPieceType: ExpectedPackageDBType.ADLIB_PIECE, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + } + for (const piece of adLibActions) { + if (piece.expectedPackages) + collector.addPackagesWithSource(piece.expectedPackages, { + fromPieceType: ExpectedPackageDBType.ADLIB_ACTION, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + } + + return collector.finish() +} diff --git a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts index 628a1ce24c..7f25e02afc 100644 --- a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts @@ -3,8 +3,7 @@ import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { JobContext } from '../../../jobs/index.js' import { ReadonlyDeep } from 'type-fest' import { RundownLock } from '../../../jobs/lock.js' -import { IngestModel } from '../IngestModel.js' -import { DatabasePersistedModel } from '../../../modelBase.js' +import { IngestDatabasePersistedModel, IngestModel } from '../IngestModel.js' import { getRundownId } from '../../lib.js' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' @@ -23,7 +22,7 @@ export async function loadIngestModelFromRundown( context: JobContext, rundownLock: RundownLock, rundown: ReadonlyDeep -): Promise { +): Promise { const span = context.startSpan('IngestModel.loadFromRundown') if (span) span.setLabel('rundownId', unprotectString(rundown._id)) @@ -57,7 +56,7 @@ export async function loadIngestModelFromRundownExternalId( context: JobContext, rundownLock: RundownLock, rundownExternalId: string -): Promise { +): Promise { const span = context.startSpan('IngestModel.loadFromExternalId') if (span) span.setLabel('externalId', rundownExternalId) diff --git a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts index 296b4b2b30..5bd2806aeb 100644 --- a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts @@ -1,8 +1,12 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } 
from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' -import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { + ExpectedPackageDB, + ExpectedPackageDBType, + isPackageReferencedByPlayout, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { PieceId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PieceId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -12,9 +16,14 @@ import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js' import { DocumentChangeTracker } from './DocumentChangeTracker.js' import { logger } from '../../../logging.js' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' +import { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { AnyBulkWriteOperation } from 'mongodb' +import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' export class SaveIngestModelHelper { - #expectedPackages = new DocumentChangeTracker() + readonly #rundownId: RundownId + + #expectedPackages: IngestExpectedPackage[] = [] #expectedPlayoutItems = new DocumentChangeTracker() #segments = new DocumentChangeTracker() @@ -23,11 +32,15 @@ export class SaveIngestModelHelper { #adLibPieces = new DocumentChangeTracker() #adLibActions = new DocumentChangeTracker() - addExpectedPackagesStore( - store: ExpectedPackagesStore, + constructor(rundownId: RundownId) { + this.#rundownId = rundownId + } + + addExpectedPackagesStore( + store: ExpectedPackagesStore, deleteAll?: boolean ): void { - this.#expectedPackages.addChanges(store.expectedPackagesChanges, deleteAll ?? false) + if (!deleteAll) this.#expectedPackages.push(...store.expectedPackages) this.#expectedPlayoutItems.addChanges(store.expectedPlayoutItemsChanges, deleteAll ?? 
false) } addSegment(segment: IngestSegmentModelImpl, segmentIsDeleted: boolean): void { @@ -69,7 +82,6 @@ export class SaveIngestModelHelper { commit(context: JobContext): Array<Promise<unknown>> { // Log deleted ids: const deletedIds: { [key: string]: ProtectedString<any>[] } = { - expectedPackages: this.#expectedPackages.getDeletedIds(), expectedPlayoutItems: this.#expectedPlayoutItems.getDeletedIds(), segments: this.#segments.getDeletedIds(), parts: this.#parts.getDeletedIds(), @@ -84,7 +96,7 @@ } return [ - context.directCollections.ExpectedPackages.bulkWrite(this.#expectedPackages.generateWriteOps()), + writeExpectedPackagesChangesForRundown(context, this.#rundownId, this.#expectedPackages), context.directCollections.ExpectedPlayoutItems.bulkWrite(this.#expectedPlayoutItems.generateWriteOps()), context.directCollections.Segments.bulkWrite(this.#segments.generateWriteOps()), @@ -95,3 +107,118 @@ ] } } + +export async function writeExpectedPackagesChangesForRundown( + context: JobContext, + rundownId: RundownId | null, + documentsToSave: IngestExpectedPackage[] +): Promise<void> { + const existingDocs = (await context.directCollections.ExpectedPackages.findFetch( + { + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + }, + { + projection: { + _id: 1, + playoutSources: 1, // This feels a bit excessive, but the whole object is needed for `isPackageReferencedByPlayout` + }, + } + )) as Pick<ExpectedPackageDB, '_id' | 'playoutSources'>[] + const existingDocsMap = normalizeArrayToMap(existingDocs, '_id') + + const packagesToSave = new Map<ExpectedPackageId, Omit<ExpectedPackageDB, 'playoutSources'>>() + for (const doc of documentsToSave) { + const partialDoc = packagesToSave.get(doc.packageId) + + if (partialDoc) { + // Add the source to the existing document + partialDoc.ingestSources.push(doc.source) + + // Maybe this should check for duplicates, but the code that generates these documents should be handling that. + } else { + // Add a new document + // Future: omit 'playoutSources' from this doc + packagesToSave.set(doc.packageId, { + _id: doc.packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + created: Date.now(), + package: doc.package, + ingestSources: [doc.source], + }) + } + } + + // Generate any insert and update operations + const ops: AnyBulkWriteOperation<ExpectedPackageDB>[] = [] + for (const doc of packagesToSave.values()) { + const existingDoc = existingDocsMap.get(doc._id) + if (!existingDoc) { + // Insert this new document + ops.push({ + insertOne: { + document: { + ...doc, + playoutSources: { + pieceInstanceIds: [], + }, + }, + }, + }) + } else { + // Document already exists; perform an update to preserve other fields + // Future: would it be beneficial to perform some diffing to only update the field if it has changed?
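+ // Such a guard could look like this sketch (it assumes ingestSources is added to the
+ // projection above, and a deep-equality helper such as underscore's isEqual; neither
+ // is part of this diff):
+ //   if (!_.isEqual(existingDoc.ingestSources, doc.ingestSources)) {
+ //     ops.push({ updateOne: { filter: { _id: doc._id }, update: { $set: { ingestSources: doc.ingestSources } } } })
+ //   }
+ // i.e. skip the updateOne below entirely when the sources are unchanged.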
+ ops.push({ + updateOne: { + filter: { _id: doc._id }, + update: { + // Update every field that we want to define + $set: { + ingestSources: doc.ingestSources, + }, + }, + }, + }) + } + } + + // Look over the existing documents, and see which are no longer referenced + const idsToDelete: ExpectedPackageId[] = [] + const idsToClearSources: ExpectedPackageId[] = [] + + for (const doc of existingDocs) { + // Skip if this document is in the list of documents to save + if (packagesToSave.has(doc._id)) continue + + if (isPackageReferencedByPlayout(doc)) { + idsToClearSources.push(doc._id) + } else { + idsToDelete.push(doc._id) + } + } + + if (idsToDelete.length > 0) { + ops.push({ + deleteMany: { + filter: { _id: { $in: idsToDelete as any } }, + }, + }) + } + if (idsToClearSources.length > 0) { + ops.push({ + updateMany: { + filter: { _id: { $in: idsToClearSources as any } }, + update: { + $set: { + ingestSources: [], + }, + }, + }, + }) + } + + if (ops.length > 0) await context.directCollections.ExpectedPackages.bulkWrite(ops) +} diff --git a/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts b/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts new file mode 100644 index 0000000000..211f7b3dba --- /dev/null +++ b/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts @@ -0,0 +1,364 @@ +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import { + ExpectedPackageDB, + ExpectedPackageDBType, + ExpectedPackageIngestSourcePiece, + getExpectedPackageId, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PartId, PieceId, PieceInstanceId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { setupDefaultJobEnvironment } from '../../../../__mocks__/context.js' +import { IngestExpectedPackage } from '../../IngestExpectedPackage.js' +import { writeExpectedPackagesChangesForRundown } from '../SaveIngestModel.js' + +describe('SaveIngestModel', () => { + describe('writeExpectedPackagesChangesForRundown', () => { + const rundownId = protectString('rundown0') + + function createMockExpectedPackage(id: string): ExpectedPackage.ExpectedPackageMediaFile { + return { + _id: id, + type: ExpectedPackage.PackageType.MEDIA_FILE, + layers: ['layer0'], + content: { filePath: `/media/${id}.mp4` }, + version: {}, + contentVersionHash: `hash_${id}`, + sources: [], + sideEffect: {}, + } + } + + function createIngestExpectedPackage( + pkg: ExpectedPackage.Base, + pieceId: string, + partId = 'part0', + segmentId = 'segment0' + ): IngestExpectedPackage { + return { + packageId: getExpectedPackageId(rundownId, pkg), + package: pkg, + source: { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString(pieceId), + partId: protectString(partId), + segmentId: protectString(segmentId), + blueprintPackageId: pkg._id, + listenToPackageInfoUpdates: false, + }, + } + } + + async function createExistingPackage( + context: ReturnType<typeof setupDefaultJobEnvironment>, + pkg: ExpectedPackage.Base, + options?: { + rundownId?: RundownId + ingestSource?: { + pieceId: string + partId?: string + segmentId?: string + } + playoutInstanceIds?: PieceInstanceId[] + created?: number + } + ): Promise<ExpectedPackageDB> { + const packageId = getExpectedPackageId(options?.rundownId ?? rundownId, pkg) + const doc: ExpectedPackageDB = { + _id: packageId, + studioId: context.studioId, + rundownId: options?.rundownId ??
rundownId, + bucketId: null, + created: options?.created ?? Date.now(), + package: pkg, + ingestSources: options?.ingestSource + ? [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString(options.ingestSource.pieceId), + partId: protectString(options.ingestSource.partId ?? 'part0'), + segmentId: protectString(options.ingestSource.segmentId ?? 'segment0'), + blueprintPackageId: pkg._id, + listenToPackageInfoUpdates: false, + }, + ] + : [], + playoutSources: { + pieceInstanceIds: options?.playoutInstanceIds ?? [], + }, + } + await context.directCollections.ExpectedPackages.insertOne(doc) + return doc + } + + it('no documents to save and no existing packages', async () => { + const context = setupDefaultJobEnvironment() + + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Should only findFetch, no bulkWrite needed + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + }) + + it('inserts new ExpectedPackage when none exist', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const ingestPackage = createIngestExpectedPackage(expectedPkg, 'piece0') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage]) + + // Verify operations: findFetch + bulkWrite + insertOne + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + + // Verify the inserted package + const insertedDoc = await context.directCollections.ExpectedPackages.findOne(ingestPackage.packageId) + expect(insertedDoc).toMatchObject({ + _id: ingestPackage.packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: expectedPkg, + ingestSources: [ingestPackage.source], + playoutSources: { + pieceInstanceIds: [], + }, + } satisfies Omit) + expect(insertedDoc?.created).toBeGreaterThan(0) + }) + + it('updates existing ExpectedPackage ingestSources', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + const originalCreated = Date.now() - 10000 + + // Pre-populate with existing package + await createExistingPackage(context, expectedPkg, { + created: originalCreated, + ingestSource: { pieceId: 'oldPiece', partId: 'oldPart', segmentId: 'oldSegment' }, + playoutInstanceIds: [protectString('existingPieceInstance')], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Create new ingest source + const newIngestPackage = createIngestExpectedPackage(expectedPkg, 'newPiece', 'newPart', 'newSegment') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [newIngestPackage]) + + // Verify operations: findFetch + bulkWrite + update + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('update') + + // Verify the update + const updatedDoc = await 
context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc?.ingestSources).toEqual([newIngestPackage.source]) + // Verify created timestamp was preserved + expect(updatedDoc?.created).toBe(originalCreated) + // Verify playoutSources were preserved + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(1) + }) + + it('deletes ExpectedPackage when no longer referenced by ingest or playout', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with existing package (no playout references) + await createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'piece0' }, + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Call with empty documentsToSave + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify operations: findFetch + bulkWrite + remove + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('remove') + + // Verify it was deleted + expect(await context.directCollections.ExpectedPackages.findOne(packageId)).toBeUndefined() + }) + + it('clears ingestSources but preserves package when still referenced by playout', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with existing package that has playout references + await createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'piece0' }, + playoutInstanceIds: [protectString('pieceInstance0')], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Call with empty documentsToSave + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify operations: findFetch + bulkWrite + update + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('update') + + // Verify ingestSources were cleared but document still exists + const updatedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc).toBeDefined() + expect(updatedDoc?.ingestSources).toEqual([]) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(1) + }) + + it('merges multiple ingest sources for the same package', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('sharedPkg') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Create two sources for the same package + const ingestPackage1 = createIngestExpectedPackage(expectedPkg, 'piece1', 'part1', 'segment1') + const ingestPackage2 = createIngestExpectedPackage(expectedPkg, 'piece2', 'part2', 'segment2') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage1, ingestPackage2]) + + // Verify only one insert (sources should be merged) + 
expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // 1 operation + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + + // Verify both sources are present + const insertedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(insertedDoc?.ingestSources).toHaveLength(2) + expect(insertedDoc?.ingestSources).toContainEqual(ingestPackage1.source) + expect(insertedDoc?.ingestSources).toContainEqual(ingestPackage2.source) + }) + + it('handles mix of insert, update, delete, and clear operations', async () => { + const context = setupDefaultJobEnvironment() + + const pkg1 = createMockExpectedPackage('pkg1') // Will be updated + const pkg2 = createMockExpectedPackage('pkg2') // Will be deleted (no playout refs) + const pkg3 = createMockExpectedPackage('pkg3') // Will have sources cleared (has playout refs) + const pkg4 = createMockExpectedPackage('pkg4') // Will be inserted + const packageId1 = getExpectedPackageId(rundownId, pkg1) + const packageId2 = getExpectedPackageId(rundownId, pkg2) + const packageId3 = getExpectedPackageId(rundownId, pkg3) + const packageId4 = getExpectedPackageId(rundownId, pkg4) + + // Setup existing packages + await createExistingPackage(context, pkg1, { + ingestSource: { pieceId: 'oldPiece1', partId: 'oldPart1', segmentId: 'oldSegment1' }, + }) + await createExistingPackage(context, pkg2, { + ingestSource: { pieceId: 'piece2', partId: 'part2', segmentId: 'segment2' }, + }) + await createExistingPackage(context, pkg3, { + ingestSource: { pieceId: 'piece3', partId: 'part3', segmentId: 'segment3' }, + playoutInstanceIds: [protectString('pi3')], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // documentsToSave contains: updated pkg1 and new pkg4 + const ingestPackage1 = createIngestExpectedPackage(pkg1, 'newPiece1', 'newPart1', 'newSegment1') + const ingestPackage4 = createIngestExpectedPackage(pkg4, 'piece4', 'part4', 'segment4') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage1, ingestPackage4]) + + // Verify final state + // pkg1: updated + const doc1 = await context.directCollections.ExpectedPackages.findOne(packageId1) + expect(doc1).toBeDefined() + expect((doc1?.ingestSources[0] as ExpectedPackageIngestSourcePiece).pieceId).toBe( + protectString('newPiece1') + ) + + // pkg2: deleted + const doc2 = await context.directCollections.ExpectedPackages.findOne(packageId2) + expect(doc2).toBeUndefined() + + // pkg3: sources cleared but preserved + const doc3 = await context.directCollections.ExpectedPackages.findOne(packageId3) + expect(doc3).toBeDefined() + expect(doc3?.ingestSources).toEqual([]) + expect(doc3?.playoutSources.pieceInstanceIds).toHaveLength(1) + + // pkg4: inserted + const doc4 = await context.directCollections.ExpectedPackages.findOne(packageId4) + expect(doc4).toBeDefined() + expect(doc4?.package).toEqual(pkg4) + }) + + it('preserves playoutSources when updating ingestSources', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with package that has both ingest and playout sources + await 
createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'oldPiece', partId: 'oldPart', segmentId: 'oldSegment' }, + playoutInstanceIds: [ + protectString('pieceInstance1'), + protectString('pieceInstance2'), + ], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Update with new ingest source + const newIngestPackage = createIngestExpectedPackage(expectedPkg, 'newPiece', 'newPart', 'newSegment') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [newIngestPackage]) + + // Verify playoutSources were preserved + const updatedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc?.ingestSources).toEqual([newIngestPackage.source]) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(2) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance1') + ) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance2') + ) + }) + + it('only affects packages for the specified rundown', async () => { + const context = setupDefaultJobEnvironment() + const otherRundownId = protectString('otherRundown') + + const pkg = createMockExpectedPackage('pkg0') + const packageIdForRundown = getExpectedPackageId(rundownId, pkg) + const packageIdForOtherRundown = getExpectedPackageId(otherRundownId, pkg) + + // Create packages in both rundowns + await createExistingPackage(context, pkg, { + ingestSource: { pieceId: 'piece0' }, + }) + await createExistingPackage(context, pkg, { + rundownId: otherRundownId, + ingestSource: { pieceId: 'piece0' }, + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Delete all packages for rundownId by passing empty array + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify package for rundownId was deleted + expect(await context.directCollections.ExpectedPackages.findOne(packageIdForRundown)).toBeUndefined() + + // Verify package for otherRundownId still exists + expect(await context.directCollections.ExpectedPackages.findOne(packageIdForOtherRundown)).toBeDefined() + }) + }) +}) diff --git a/packages/job-worker/src/ingest/packageInfo.ts b/packages/job-worker/src/ingest/packageInfo.ts index 6816a9d4d4..aec2f1f590 100644 --- a/packages/job-worker/src/ingest/packageInfo.ts +++ b/packages/job-worker/src/ingest/packageInfo.ts @@ -1,38 +1,11 @@ import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { - ExpectedPackagesRegenerateProps, - PackageInfosUpdatedRundownProps, -} from '@sofie-automation/corelib/dist/worker/ingest' +import { PackageInfosUpdatedRundownProps } from '@sofie-automation/corelib/dist/worker/ingest' import { logger } from '../logging.js' import { JobContext } from '../jobs/index.js' import { regenerateSegmentsFromIngestData } from './generationSegment.js' -import { runWithRundownLock } from './lock.js' -import { updateExpectedPackagesForPartModel, updateExpectedPackagesForRundownBaseline } from './expectedPackages.js' -import { loadIngestModelFromRundown } from './model/implementation/LoadIngestModel.js' import { runCustomIngestUpdateOperation } from './runOperation.js' - -/** - * Debug: Regenerate ExpectedPackages for a Rundown - */ -export async function handleExpectedPackagesRegenerate( - context: JobContext, - data: ExpectedPackagesRegenerateProps -): Promise { - return runWithRundownLock(context, data.rundownId, async 
(rundown, rundownLock) => { - if (!rundown) throw new Error(`Rundown "${data.rundownId}" not found`) - - const ingestModel = await loadIngestModelFromRundown(context, rundownLock, rundown) - - for (const part of ingestModel.getAllOrderedParts()) { - updateExpectedPackagesForPartModel(context, part) - } - - await updateExpectedPackagesForRundownBaseline(context, ingestModel, undefined, true) - - await ingestModel.saveAllToDatabase() - }) -} +import { assertNever } from '@sofie-automation/corelib/dist/lib' /** * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos @@ -58,23 +31,35 @@ export async function handleUpdatedPackageInfoForRundown( let regenerateRundownBaseline = false for (const packageId of data.packageIds) { - const pkg = ingestModel.findExpectedPackage(packageId) - if (pkg) { - if ( - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) { - segmentsToUpdate.add(pkg.segmentId) - } else if ( - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - ) { - regenerateRundownBaseline = true + const pkgIngestSources = ingestModel.findExpectedPackageIngestSources(packageId) + for (const source of pkgIngestSources) { + // Only consider sources that are marked to listen to package info updates + if (!source.listenToPackageInfoUpdates) continue + + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + segmentsToUpdate.add(source.segmentId) + break + + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + regenerateRundownBaseline = true + break + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + // Ignore + break + default: + assertNever(source) } - } else { - logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) + } + if (pkgIngestSources.length === 0) { + logger.warn(`onUpdatedPackageInfoForRundown: Missing ingestSources for package: "${packageId}"`) } } diff --git a/packages/job-worker/src/ingest/runOperation.ts b/packages/job-worker/src/ingest/runOperation.ts index b6353960dd..3bdb1bbf0a 100644 --- a/packages/job-worker/src/ingest/runOperation.ts +++ b/packages/job-worker/src/ingest/runOperation.ts @@ -1,4 +1,4 @@ -import { IngestModel, IngestModelReadonly } from './model/IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js' import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit.js' import { SofieIngestRundownDataCache, SofieIngestRundownDataCacheGenerator } from './sofieIngestCache.js' import { canRundownBeUpdated, getRundownId, getSegmentId } from './lib.js' @@ -8,7 +8,6 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/erro import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel.js' import { Complete, clone } from '@sofie-automation/corelib/dist/lib' import { CommitIngestData, runWithRundownLockWithoutFetchingRundown } from './lock.js' -import { 
DatabasePersistedModel } from '../modelBase.js' import { NrcsIngestChangeDetails, IngestRundown, @@ -352,7 +351,7 @@ function sortIngestRundown(rundown: IngestRundown): void { async function updateSofieRundownModel( context: JobContext, - pIngestModel: Promise, + pIngestModel: Promise, computedIngestChanges: ComputedIngestChanges | null ) { const ingestModel = await pIngestModel diff --git a/packages/job-worker/src/ipc.ts b/packages/job-worker/src/ipc.ts index 45fbf89324..4b8b938846 100644 --- a/packages/job-worker/src/ipc.ts +++ b/packages/job-worker/src/ipc.ts @@ -10,14 +10,8 @@ import { getPrometheusMetricsString, setupPrometheusMetrics } from '@sofie-autom */ class IpcJobManager implements JobManager { constructor( - public readonly jobFinished: ( - id: string, - startedTime: number, - finishedTime: number, - error: any, - result: any - ) => Promise, - public readonly queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise, + public readonly jobFinished: JobManager['jobFinished'], + public readonly queueJob: JobManager['queueJob'], private readonly interruptJobStream: (queueName: string) => Promise, private readonly waitForNextJob: (queueName: string) => Promise, private readonly getNextJob: (queueName: string) => Promise @@ -43,11 +37,11 @@ class IpcJobManager implements JobManager { export class IpcJobWorker extends JobWorkerBase { constructor( workerId: WorkerId, - jobFinished: (id: string, startedTime: number, finishedTime: number, error: any, result: any) => Promise, + jobFinished: JobManager['jobFinished'], interruptJobStream: (queueName: string) => Promise, waitForNextJob: (queueName: string) => Promise, getNextJob: (queueName: string) => Promise, - queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise, + queueJob: JobManager['queueJob'], logLine: (msg: LogEntry) => Promise, fastTrackTimeline: FastTrackTimelineFunc, enableFreezeLimit: boolean diff --git a/packages/job-worker/src/jobs/index.ts b/packages/job-worker/src/jobs/index.ts index aebd415538..78bd930ea6 100644 --- a/packages/job-worker/src/jobs/index.ts +++ b/packages/job-worker/src/jobs/index.ts @@ -24,6 +24,19 @@ export { ApmSpan } export { ProcessedShowStyleVariant, ProcessedShowStyleBase, ProcessedShowStyleCompound } export { JobStudio } +export interface QueueJobOptions { + /** + * The job should be run with a low priority, allowing other operations to be run first + */ + lowPriority?: boolean + + /** + * Debounce execution, delaying execution for at least this wait time (in ms). + * If the job is already queued, it will not be queued again + */ + debounce?: number +} + /** * Context for any job run in the job-worker */ @@ -54,7 +67,11 @@ export interface JobContext extends StudioCacheContext { * @param data Data for the job * @returns Promise which resolves once successfully queued */ - queueStudioJob(name: T, data: Parameters[0]): Promise + queueStudioJob( + name: T, + data: Parameters[0], + options?: QueueJobOptions + ): Promise /** * Queue an Event job to be run * It is not possible to wait for the result. 
This ensures the threads don't get deadlocked diff --git a/packages/job-worker/src/manager.ts b/packages/job-worker/src/manager.ts index ef7bc8697d..745ba868dd 100644 --- a/packages/job-worker/src/manager.ts +++ b/packages/job-worker/src/manager.ts @@ -1,5 +1,6 @@ -import { WorkerId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { JobSpec } from './main.js' +import type { WorkerId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { JobSpec } from './main.js' +import type { QueueJobOptions } from './jobs/index.js' export interface JobManager { jobFinished: ( @@ -10,7 +11,12 @@ export interface JobManager { result: any ) => Promise // getNextJob: (queueName: string) => Promise - queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise + queueJob: ( + queueName: string, + jobName: string, + jobData: unknown, + options: QueueJobOptions | undefined + ) => Promise subscribeToQueue: (queueName: string, workerId: WorkerId) => JobStream } diff --git a/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts new file mode 100644 index 0000000000..dd2c1888f8 --- /dev/null +++ b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts @@ -0,0 +1,509 @@ +import { + ExpectedPackageId, + PieceInstanceId, + RundownId, + RundownPlaylistId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' +import { protectString, protectStringArray } from '@sofie-automation/corelib/dist/protectedString' +import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js' +import { setupDefaultRundownPlaylist, setupMockShowStyleCompound } from '../../__mocks__/presetCollections.js' +import { handleCleanupOrphanedExpectedPackageReferences } from '../expectedPackages.js' +import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { getCurrentTime } from '../../lib/index.js' + +describe('handleCleanupOrphanedExpectedPackageReferences', () => { + let context: MockJobContext + let rundownId: RundownId + let playlistId: RundownPlaylistId + + beforeEach(async () => { + context = setupDefaultJobEnvironment() + + // Setup showstyle so we can create a rundown + await setupMockShowStyleCompound(context) + const result = await setupDefaultRundownPlaylist(context) + rundownId = result.rundownId + playlistId = result.playlistId + }) + + function createMockExpectedPackage( + id: string, + pieceInstanceIds: string[], + ingestSourceCount: number = 0 + ): ExpectedPackageDB { + const ingestSources = [] + for (let i = 0; i < ingestSourceCount; i++) { + ingestSources.push({ + fromPieceType: ExpectedPackageDBType.PIECE, + blueprintPackageId: `blueprint_${id}_${i}`, + listenToPackageInfoUpdates: false, + pieceId: protectString(`piece_${id}_${i}`), + partId: protectString(`part_${id}_${i}`), + segmentId: protectString(`segment_${id}_${i}`), + }) + } + + return { + _id: protectString(id), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + created: getCurrentTime(), + package: { + _id: id, + contentVersionHash: 'hash1', + type: 'media_file' as any, + content: {}, + version: {}, + sources: [], + layers: [], + sideEffect: {}, + }, + ingestSources: ingestSources as ExpectedPackageDB['ingestSources'], + playoutSources: { + pieceInstanceIds: protectStringArray(pieceInstanceIds), + }, + } + } + + function 
diff --git a/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts
new file mode 100644
index 0000000000..dd2c1888f8
--- /dev/null
+++ b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts
@@ -0,0 +1,509 @@
+import {
+	ExpectedPackageId,
+	PieceInstanceId,
+	RundownId,
+	RundownPlaylistId,
+} from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { protectString, protectStringArray } from '@sofie-automation/corelib/dist/protectedString'
+import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js'
+import { setupDefaultRundownPlaylist, setupMockShowStyleCompound } from '../../__mocks__/presetCollections.js'
+import { handleCleanupOrphanedExpectedPackageReferences } from '../expectedPackages.js'
+import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
+import { getCurrentTime } from '../../lib/index.js'
+
+describe('handleCleanupOrphanedExpectedPackageReferences', () => {
+	let context: MockJobContext
+	let rundownId: RundownId
+	let playlistId: RundownPlaylistId
+
+	beforeEach(async () => {
+		context = setupDefaultJobEnvironment()
+
+		// Setup showstyle so we can create a rundown
+		await setupMockShowStyleCompound(context)
+		const result = await setupDefaultRundownPlaylist(context)
+		rundownId = result.rundownId
+		playlistId = result.playlistId
+	})
+
+	function createMockExpectedPackage(
+		id: string,
+		pieceInstanceIds: string[],
+		ingestSourceCount: number = 0
+	): ExpectedPackageDB {
+		const ingestSources = []
+		for (let i = 0; i < ingestSourceCount; i++) {
+			ingestSources.push({
+				fromPieceType: ExpectedPackageDBType.PIECE,
+				blueprintPackageId: `blueprint_${id}_${i}`,
+				listenToPackageInfoUpdates: false,
+				pieceId: protectString(`piece_${id}_${i}`),
+				partId: protectString(`part_${id}_${i}`),
+				segmentId: protectString(`segment_${id}_${i}`),
+			})
+		}
+
+		return {
+			_id: protectString(id),
+			studioId: context.studioId,
+			rundownId: rundownId,
+			bucketId: null,
+			created: getCurrentTime(),
+			package: {
+				_id: id,
+				contentVersionHash: 'hash1',
+				type: 'media_file' as any,
+				content: {},
+				version: {},
+				sources: [],
+				layers: [],
+				sideEffect: {},
+			},
+			ingestSources: ingestSources as ExpectedPackageDB['ingestSources'],
+			playoutSources: {
+				pieceInstanceIds: protectStringArray(pieceInstanceIds),
+			},
+		}
+	}
+
+	function createMockPieceInstance(
+		id: string,
+		neededPackageIds: string[] = [],
+		reset: boolean = false
+	): Partial<PieceInstance> {
+		return {
+			_id: protectString(id),
+			rundownId: rundownId,
+			partInstanceId: protectString('partInstance_0'),
+			playlistActivationId: protectString('activation_0'),
+			reset: reset,
+			neededExpectedPackageIds: protectStringArray(neededPackageIds),
+			piece: {
+				_id: protectString(`piece_${id}`),
+				startPartId: protectString('part_0'),
+				externalId: `MOCK_PIECE_${id}`,
+				name: `Piece ${id}`,
+				lifespan: 'WithinPart' as any,
+				invalid: false,
+				enable: { start: 0 },
+				sourceLayerId: 'source0',
+				outputLayerId: 'output0',
+				content: {},
+				timelineObjectsString: '' as any,
+				pieceType: 'Normal' as any,
+			},
+		}
+	}
+
+	test('does nothing when there are no expected packages', async () => {
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([])
+	})
+
+	test('does nothing when all package references are valid', async () => {
+		// Create piece instances that reference expected packages
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('pieceInstance1', ['package1']) as any
+		)
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('pieceInstance2', ['package2']) as any
+		)
+
+		// Create expected packages that are referenced by valid piece instances
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['pieceInstance1'])
+		)
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package2', ['pieceInstance2'])
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages (only read, no writes)
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([])
+
+		// Verify packages remain unchanged
+		const packages = await context.directCollections.ExpectedPackages.findFetch({})
+		expect(packages).toHaveLength(2)
+		expect(packages.find((p) => p._id === protectString('package1'))?.playoutSources.pieceInstanceIds).toEqual([
+			protectString('pieceInstance1'),
+		])
+		expect(packages.find((p) => p._id === protectString('package2'))?.playoutSources.pieceInstanceIds).toEqual([
+			protectString('pieceInstance2'),
+		])
+	})
+
+	test('removes orphaned package reference when piece instance no longer exists', async () => {
+		// Create expected package that references a piece instance that doesn't exist
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['nonExistentPieceInstance'], 1) // has ingest source
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] },
+		])
+	})
+
+	test('deletes package when all references are orphaned and no ingest sources', async () => {
+		// Create expected package with no ingest sources and orphaned piece instance
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['nonExistentPieceInstance'], 0) // no ingest sources
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'removeOne', args: [{ _id: 'package1' }] },
+		])
+	})
+
+	test('removes only orphaned references when partial removal is needed', async () => {
+		// Create a valid piece instance
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('validPieceInstance', ['package1']) as any
+		)
+
+		// Create expected package that references both valid and invalid piece instances
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['validPieceInstance', 'orphanedPieceInstance'])
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{
+				type: 'update',
+				args: [
+					{ _id: 'package1' },
+					{ $pull: { 'playoutSources.pieceInstanceIds': { $in: ['orphanedPieceInstance'] } } },
+				],
+			},
+		])
+
+		// Verify the result - only valid reference remains
+		const packages = await context.directCollections.ExpectedPackages.findFetch({})
+		expect(packages).toHaveLength(1)
+		expect(packages[0].playoutSources.pieceInstanceIds).toEqual([protectString('validPieceInstance')])
+	})
+
+	test('removes reference when piece instance exists but does not reference the package', async () => {
+		// Create piece instance that references a different package
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('pieceInstance1', ['differentPackage']) as any
+		)
+
+		// Create expected package that references the piece instance, but piece instance doesn't reference it back
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['pieceInstance1'], 1) // has ingest source
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] },
+		])
+	})
+
+	test('deletes package when reset piece instance references the package', async () => {
+		// Create a reset piece instance
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('resetPieceInstance', ['package1'], true) as any
+		)
+
+		// Create expected package that references the reset piece instance
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['resetPieceInstance'], 0) // no ingest sources
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'removeOne', args: [{ _id: 'package1' }] },
+		])
+	})
+
+	test('handles multiple packages with mixed scenarios', async () => {
+		// Create valid piece instances
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('pieceInstance1', ['package1', 'package3']) as any
+		)
+		await context.mockCollections.PieceInstances.insertOne(
+			createMockPieceInstance('pieceInstance2', ['package2']) as any
+		)
+
+		// Package1: valid reference, should be kept as-is
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['pieceInstance1'])
+		)
+
+		// Package2: valid reference, should be kept as-is
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package2', ['pieceInstance2'])
+		)
+
+		// Package3: valid + orphaned reference, should have orphaned removed
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package3', ['pieceInstance1', 'orphanedInstance'])
+		)
+
+		// Package4: all orphaned, no ingest sources, should be deleted
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package4', ['orphanedInstance1', 'orphanedInstance2'], 0)
+		)
+
+		// Package5: all orphaned, has ingest sources, should be kept with empty pieceInstanceIds
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package5', ['orphanedInstance3'], 1)
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [3] },
+			{
+				type: 'update',
+				args: [
+					{ _id: 'package3' },
+					{ $pull: { 'playoutSources.pieceInstanceIds': { $in: ['orphanedInstance'] } } },
+				],
+			},
+			{ type: 'removeOne', args: [{ _id: 'package4' }] },
+			{ type: 'update', args: [{ _id: 'package5' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] },
+		])
+
+		const packages = await context.directCollections.ExpectedPackages.findFetch({})
+		expect(packages).toHaveLength(4) // package4 is deleted
+
+		const package1Id = protectString('package1')
+		const package2Id = protectString('package2')
+		const package3Id = protectString('package3')
+		const package4Id = protectString('package4')
+		const package5Id = protectString('package5')
+
+		const package1 = packages.find((p) => p._id === package1Id)
+		const package2 = packages.find((p) => p._id === package2Id)
+		const package3 = packages.find((p) => p._id === package3Id)
+		const package4 = packages.find((p) => p._id === package4Id)
+		const package5 = packages.find((p) => p._id === package5Id)
+
+		// Packages with only valid references should be unchanged
+		expect(package1?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance1')])
+		expect(package2?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance2')])
+		// Package3 should have orphaned reference removed, valid one kept
+		expect(package3?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance1')])
+		// Package4 should be deleted
+		expect(package4).toBeUndefined()
+		// Package5 should have pieceInstanceIds cleared
+		expect(package5?.playoutSources.pieceInstanceIds).toEqual([])
+	})
+
+	test('deletes rundown package when orphaned while keeping other rundown packages', async () => {
+		const otherRundownId = protectString('otherRundown')
+
+		// Create expected package for a different rundown (should not be affected)
+		const otherRundownPackage = createMockExpectedPackage('packageOther', ['orphanedPieceInstance'])
+		otherRundownPackage.rundownId = otherRundownId
+		await context.mockCollections.ExpectedPackages.insertOne(otherRundownPackage)
+
+		// Create expected package for current rundown with orphaned reference
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('packageCurrent', ['orphanedPieceInstance'], 0)
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'removeOne', args: [{ _id: 'packageCurrent' }] },
+		])
+
+		const packages = await context.directCollections.ExpectedPackages.findFetch({})
+		// packageCurrent is deleted, packageOther remains
+		expect(packages).toHaveLength(1)
+
+		const packageOther = packages.find((p) => p._id === protectString('packageOther'))
+		const packageCurrent = packages.find((p) => p._id === protectString('packageCurrent'))
+
+		// packageOther should be untouched (has different rundownId)
+		expect(packageOther?.playoutSources.pieceInstanceIds).toEqual([protectString('orphanedPieceInstance')])
+		// packageCurrent should be deleted
+		expect(packageCurrent).toBeUndefined()
+	})
+
+	test('deletes rundown package when orphaned while keeping bucket packages', async () => {
+		// Create a bucket package (should not be affected since it has bucketId set)
+		const bucketPackage = createMockExpectedPackage('bucketPackage', ['orphanedPieceInstance'])
+		bucketPackage.rundownId = null
+		bucketPackage.bucketId = protectString('bucket1')
+		await context.mockCollections.ExpectedPackages.insertOne(bucketPackage)
+
+		// Create expected package for current rundown with orphaned reference
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('rundownPackage', ['orphanedPieceInstance'], 0)
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'removeOne', args: [{ _id: 'rundownPackage' }] },
+		])
+
+		const packages = await context.directCollections.ExpectedPackages.findFetch({})
+		// rundownPackage is deleted, bucketPackage remains
+		expect(packages).toHaveLength(1)
+
+		const bucketPkg = packages.find((p) => p._id === protectString('bucketPackage'))
+		const rundownPkg = packages.find((p) => p._id === protectString('rundownPackage'))
+
+		// bucketPackage should be untouched (it has a bucketId set, so it is not matched by the query)
+		expect(bucketPkg?.playoutSources.pieceInstanceIds).toEqual([protectString('orphanedPieceInstance')])
+		// rundownPackage should be deleted
+		expect(rundownPkg).toBeUndefined()
+	})
+
+	test('handles package with no piece instance references', async () => {
+		// Create expected package with no piece instance references
+		await context.mockCollections.ExpectedPackages.insertOne(createMockExpectedPackage('package1', [], 1))
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		// The function updates even if pieceInstanceIds is already empty
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] },
+		])
+	})
+
+	test('handles piece instance with no neededExpectedPackageIds', async () => {
+		// Create piece instance with no neededExpectedPackageIds
+		const pieceInstance = createMockPieceInstance('pieceInstance1', [])
+		delete pieceInstance.neededExpectedPackageIds
+		await context.mockCollections.PieceInstances.insertOne(pieceInstance as any)
+
+		// Create expected package that references this piece instance
+		await context.mockCollections.ExpectedPackages.insertOne(
+			createMockExpectedPackage('package1', ['pieceInstance1'], 1)
+		)
+
+		// Clear operations from setup
+		context.mockCollections.ExpectedPackages.clearOpLog()
+
+		await handleCleanupOrphanedExpectedPackageReferences(context, {
+			playlistId: playlistId,
+			rundownId: rundownId,
+		})
+
+		// Verify the operations performed on ExpectedPackages
+		const ops = context.mockCollections.ExpectedPackages.operations
+		expect(ops[0].type).toBe('findFetch')
+		expect(ops.slice(1)).toEqual([
+			{ type: 'bulkWrite', args: [1] },
+			{ type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] },
+		])
+	})
+})
diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts
index 36eaa59a10..cb1a602079 100644
--- a/packages/job-worker/src/playout/adlibAction.ts
+++ b/packages/job-worker/src/playout/adlibAction.ts
@@ -76,17 +76,6 @@ export async function executeAdlibActionAndSaveModel(
 		throw UserError.create(UserErrorMessage.ActionsNotSupported)
 	}
 
-	const watchedPackages = await WatchedPackagesHelper.create(context, {
-		pieceId: data.actionDocId,
-		fromPieceType: {
-			$in: [
-				ExpectedPackageDBType.ADLIB_ACTION,
-				ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
-				ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
-			],
-		},
-	})
-
 	const [adLibAction, baselineAdLibAction, bucketAdLibAction] = await Promise.all([
 		context.directCollections.AdLibActions.findOne(data.actionDocId as AdLibActionId, {
 			projection: { _id: 1, privateData: 1 },
@@ -103,6 +92,27 @@ export async function executeAdlibActionAndSaveModel(
 	])
 
 	const adLibActionDoc = adLibAction ?? baselineAdLibAction ?? bucketAdLibAction
+	if (adLibActionDoc && adLibActionDoc.invalid)
+		throw UserError.from(
+			new Error(`Cannot take invalid AdLib Action "${adLibActionDoc._id}"!`),
+			UserErrorMessage.AdlibUnplayable
+		)
+
+	let watchedPackages = WatchedPackagesHelper.empty(context)
+	if (adLibActionDoc && 'rundownId' in adLibActionDoc) {
+		watchedPackages = await WatchedPackagesHelper.create(context, adLibActionDoc.rundownId, null, {
+			fromPieceType: {
+				$in: [ExpectedPackageDBType.ADLIB_ACTION, ExpectedPackageDBType.BASELINE_ADLIB_ACTION],
+			},
+			pieceId: data.actionDocId,
+		})
+	} else if (adLibActionDoc && 'bucketId' in adLibActionDoc) {
+		watchedPackages = await WatchedPackagesHelper.create(context, null, adLibActionDoc.bucketId, {
+			fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
+			pieceId: data.actionDocId,
+		})
+	}
+
 	const actionParameters: ExecuteActionParameters = {
 		actionId: data.actionId,
 		userData: data.userData,
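WatchedPackagesHelper.create now takes the rundown and bucket scope as explicit arguments instead of inferring them from the query. A sketch of the three shapes used across this patch (the variable names are illustrative, not from the source):

	const forRundown = await WatchedPackagesHelper.create(context, rundownId, null, { pieceId: actionDocId })
	const forBucket = await WatchedPackagesHelper.create(context, null, bucketId, { pieceId: actionDocId })
	const forStudio = await WatchedPackagesHelper.create(context, null, null, {
		fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS,
	})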
diff --git a/packages/job-worker/src/playout/expectedPackages.ts b/packages/job-worker/src/playout/expectedPackages.ts
new file mode 100644
index 0000000000..0fc07f795d
--- /dev/null
+++ b/packages/job-worker/src/playout/expectedPackages.ts
@@ -0,0 +1,133 @@
+import type { CleanupOrphanedExpectedPackageReferencesProps } from '@sofie-automation/corelib/dist/worker/studio'
+import type { JobContext } from '../jobs/index.js'
+import { runWithPlaylistLock } from './lock.js'
+import {
+	ExpectedPackageDB,
+	isPackageReferencedByPlayout,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
+import { AnyBulkWriteOperation } from 'mongodb'
+import { ExpectedPackageId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+
+export async function handleCleanupOrphanedExpectedPackageReferences(
+	context: JobContext,
+	data: CleanupOrphanedExpectedPackageReferencesProps
+): Promise<void> {
+	// Something has changed in the PieceInstances. We need to check that the ExpectedPackages have only valid PieceInstances as owners, and remove any which no longer have owners
+
+	await runWithPlaylistLock(context, data.playlistId, async () => {
+		const [existingPackages, validPieceInstances] = await Promise.all([
+			context.directCollections.ExpectedPackages.findFetch(
+				{
+					studioId: context.studioId,
+					rundownId: data.rundownId,
+					bucketId: null,
+				},
+				{
+					projection: {
+						_id: 1,
+						playoutSources: 1,
+						// We only need to know if there are any entries, so project them to be as minimal as possible
+						'ingestSources.fromPieceType': 1,
+					},
+				}
+			) as Promise<
+				Array<
+					Pick<ExpectedPackageDB, '_id' | 'playoutSources'> & {
+						ingestSources: unknown[]
+					}
+				>
+			>,
+			context.directCollections.PieceInstances.findFetch(
+				{
+					rundownId: data.rundownId,
+					reset: { $ne: true },
+				},
+				{
+					projection: {
+						_id: 1,
+						neededExpectedPackageIds: 1,
+					},
+				}
+			) as Promise<Array<Pick<PieceInstance, '_id' | 'neededExpectedPackageIds'>>>,
+		])
+
+		const pieceInstancePackageMap = new Map<PieceInstanceId, Set<ExpectedPackageId>>()
+		for (const pieceInstance of validPieceInstances) {
+			if (pieceInstance.neededExpectedPackageIds && pieceInstance.neededExpectedPackageIds.length > 0)
+				pieceInstancePackageMap.set(pieceInstance._id, new Set(pieceInstance.neededExpectedPackageIds))
+		}
+
+		const writeOps: AnyBulkWriteOperation[] = []
+
+		for (const expectedPackage of existingPackages) {
+			// Find the pieceInstanceIds that are stale
+			const pieceInstanceIdsToRemove: PieceInstanceId[] = []
+			for (const pieceInstanceId of expectedPackage.playoutSources.pieceInstanceIds) {
+				const pieceInstancePackageIds = pieceInstancePackageMap.get(pieceInstanceId)
+				if (!pieceInstancePackageIds || !pieceInstancePackageIds.has(expectedPackage._id)) {
+					// This pieceInstanceId is no longer valid, queue it to be removed
+					pieceInstanceIdsToRemove.push(pieceInstanceId)
+				}
+			}
+
+			// Queue the write
+			if (pieceInstanceIdsToRemove.length === expectedPackage.playoutSources.pieceInstanceIds.length) {
+				// It looks like all the pieceInstanceIds are being removed
+
+				if (
+					expectedPackage.ingestSources.length === 0 &&
+					!isPackageReferencedByPlayout({
+						// Test with a fake package
+						...expectedPackage,
+						playoutSources: {
+							...expectedPackage.playoutSources,
+							pieceInstanceIds: [],
+						},
+					})
+				) {
+					// This package is not referenced by anything, so we can delete it
+					writeOps.push({
+						deleteOne: {
+							filter: {
+								_id: expectedPackage._id,
+							},
+						},
+					})
+				} else {
+					// This package is still referenced by something, so we need to keep it
+					writeOps.push({
+						updateOne: {
+							filter: {
+								_id: expectedPackage._id,
+							},
+							update: {
+								$set: {
+									'playoutSources.pieceInstanceIds': [],
+								},
+							},
+						},
+					})
+				}
+			} else if (pieceInstanceIdsToRemove.length > 0) {
+				// Some of the pieceInstanceIds are being removed
+				writeOps.push({
+					updateOne: {
+						filter: {
+							_id: expectedPackage._id,
+						},
+						update: {
+							$pull: {
+								'playoutSources.pieceInstanceIds': { $in: pieceInstanceIdsToRemove },
+							},
+						},
+					},
+				})
+			}
+		}
+
+		if (writeOps.length > 0) {
+			await context.directCollections.ExpectedPackages.bulkWrite(writeOps)
+		}
+	})
+}
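Condensed, each fetched package falls into one of four outcomes. A standalone sketch of that decision — the helper and its names are invented here, and `hasOtherReferences` stands in for the ingest-source count and isPackageReferencedByPlayout checks above:

	function classifyPackage(
		refCount: number,
		staleCount: number,
		hasOtherReferences: boolean
	): 'keep' | 'pullSome' | 'clearAll' | 'delete' {
		if (staleCount === 0) return 'keep' // no write queued
		if (staleCount < refCount) return 'pullSome' // $pull just the stale ids
		return hasOtherReferences ? 'clearAll' : 'delete' // $set to [], or deleteOne
	}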
diff --git a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
index 6cb43e43e1..a247fcb0f0 100644
--- a/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
+++ b/packages/job-worker/src/playout/model/PlayoutPartInstanceModel.ts
@@ -11,6 +11,7 @@ import { IBlueprintMutatablePart, PieceLifespan, Time } from '@sofie-automation/
 import { PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings'
 import { PlayoutPieceInstanceModel } from './PlayoutPieceInstanceModel.js'
 import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataModel/UserEditingDefinitions'
+import { PartInvalidReason } from '@sofie-automation/corelib/dist/dataModel/Part'
 
 /**
  * Token returned when making a backup copy of a PlayoutPartInstanceModel
@@ -56,6 +57,14 @@ export interface PlayoutPartInstanceModel {
 	 */
 	blockTakeUntil(timestamp: Time | null): void
 
+	/**
+	 * Set the invalid reason for this PartInstance.
+	 * This indicates a runtime validation issue that prevents taking the part.
+	 * This is distinct from the planned `invalidReason` on the Part itself.
+	 * @param reason The reason the part is invalid, or undefined to clear
+	 */
+	setInvalidReason(reason: PartInvalidReason | undefined): void
+
 	/**
 	 * Get a PieceInstance which belongs to this PartInstance
	 * @param id Id of the PieceInstance
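A sketch of the intended call pattern. The wrapper below is hypothetical; only setInvalidReason itself and the take-time rejection are from this patch (PartInvalidReason and PlayoutPartInstanceModel come from the modules shown above):

	// Store a runtime validation result on the PartInstance; `undefined` clears it.
	// While a reason is set, performTakeToNextedPart() rejects the take with
	// UserErrorMessage.TakePartInstanceInvalid (see the take.ts change further down).
	function applyRuntimeValidation(
		partInstance: PlayoutPartInstanceModel,
		reason: PartInvalidReason | undefined
	): void {
		partInstance.setInvalidReason(reason)
	}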
diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts
index 76086bd98c..52253f1a2f 100644
--- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts
+++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts
@@ -37,7 +37,13 @@ import _ from 'underscore'
 import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import { PlaylistLock } from '../../../jobs/lock.js'
 import { logger } from '../../../logging.js'
-import { clone, getRandomId, literal, normalizeArrayToMapFunc } from '@sofie-automation/corelib/dist/lib'
+import {
+	clone,
+	getRandomId,
+	groupByToMapFunc,
+	literal,
+	normalizeArrayToMapFunc,
+} from '@sofie-automation/corelib/dist/lib'
 import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib'
 import { sortRundownIDsInPlaylist } from '@sofie-automation/corelib/dist/playout/playlist'
 import { PlayoutRundownModel } from '../PlayoutRundownModel.js'
@@ -50,10 +56,13 @@ import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedSt
 import { queuePartInstanceTimingEvent } from '../../timings/events.js'
 import { IS_PRODUCTION } from '../../../environment.js'
 import { DeferredAfterSaveFunction, DeferredFunction, PlayoutModel, PlayoutModelReadonly } from '../PlayoutModel.js'
-import { writePartInstancesAndPieceInstances, writeAdlibTestingSegments } from './SavePlayoutModel.js'
+import {
+	writePartInstancesAndPieceInstances,
+	writeAdlibTestingSegments,
+	writeExpectedPackagesForPlayoutSources,
+} from './SavePlayoutModel.js'
 import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js'
 import { DatabasePersistedModel } from '../../../modelBase.js'
-import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import { StudioBaselineHelper } from '../../../studio/model/StudioBaselineHelper.js'
 import { QuickLoopService } from '../services/QuickLoopService.js'
@@ -61,6 +70,7 @@ import { calculatePartTimings, PartCalculatedTimings } from '@sofie-automation/c
 import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune'
 import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js'
 import { getExpectedLatency } from '@sofie-automation/corelib/dist/studio/playout'
+import { ExpectedPackage } from '@sofie-automation/blueprints-integration'
 
 export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly {
 	public readonly playlistId: RundownPlaylistId
@@ -696,12 +706,20 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou
 		}
 		this.#timelineHasChanged = false
 
+		const partInstancesByRundownId = groupByToMapFunc(
+			Array.from(this.allPartInstances.values()).filter((p) => !!p),
+			(p) => p.partInstance.rundownId
+		)
+
 		await Promise.all([
 			this.#playlistHasChanged
				? this.context.directCollections.RundownPlaylists.replace(this.playlistImpl)
 				: undefined,
 			...writePartInstancesAndPieceInstances(this.context, this.allPartInstances),
 			writeAdlibTestingSegments(this.context, this.rundownsImpl),
+			...Array.from(partInstancesByRundownId.entries()).map(async ([rundownId, partInstances]) =>
+				writeExpectedPackagesForPlayoutSources(this.context, this.playlistId, rundownId, partInstances)
+			),
 			this.#baselineHelper.saveAllToDatabase(),
 			this.#notificationsHelper.saveAllToDatabase(),
 			this.context.saveRouteSetChanges(),
@@ -841,7 +859,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou
 		return this.timelineImpl
 	}
 
-	setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void {
+	setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void {
 		this.#baselineHelper.setExpectedPackages(packages)
 	}
 	setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void {
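groupByToMapFunc is imported from corelib; its behaviour is inferred here from this call site. A self-contained equivalent, to make the per-rundown batching explicit:

	// A sketch of what the corelib helper presumably does, inferred from its usage above:
	// bucket items into a Map keyed by the result of getKey.
	function groupByToMapSketch<T, K>(items: readonly T[], getKey: (item: T) => K): Map<K, T[]> {
		const map = new Map<K, T[]>()
		for (const item of items) {
			const key = getKey(item)
			const group = map.get(key)
			if (group) group.push(item)
			else map.set(key, [item])
		}
		return map
	}
	// Grouping PartInstances by rundownId yields one writeExpectedPackagesForPlayoutSources
	// call — and so one bulkWrite — per rundown.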
diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts
index 6294c5c00c..06e662494e 100644
--- a/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts
+++ b/packages/job-worker/src/playout/model/implementation/PlayoutPartInstanceModelImpl.ts
@@ -25,7 +25,7 @@ import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js'
 import { PlayoutPieceInstanceModelImpl } from './PlayoutPieceInstanceModelImpl.js'
 import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import _ from 'underscore'
-import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
+import { DBPart, PartInvalidReason } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { PlayoutMutatablePartSampleKeys } from '../../../blueprints/context/lib.js'
 import { QuickLoopService } from '../services/QuickLoopService.js'
 
@@ -217,6 +217,10 @@ export class PlayoutPartInstanceModelImpl implements PlayoutPartInstanceModel {
 		this.#compareAndSetPartInstanceValue('blockTakeUntil', timestamp ?? undefined)
 	}
 
+	setInvalidReason(reason: PartInvalidReason | undefined): void {
+		this.#compareAndSetPartInstanceValue('invalidReason', reason)
+	}
+
 	getPieceInstance(id: PieceInstanceId): PlayoutPieceInstanceModel | undefined {
 		return this.pieceInstancesImpl.get(id) ?? undefined
 	}
diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts
index 4f10a8ec79..6c1f5a9588 100644
--- a/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts
+++ b/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts
@@ -1,10 +1,11 @@
-import { PieceInstanceInfiniteId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { ExpectedPackageId, PieceInstanceInfiniteId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { ReadonlyDeep } from 'type-fest'
 import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
 import { clone, getRandomId } from '@sofie-automation/corelib/dist/lib'
-import { Time } from '@sofie-automation/blueprints-integration'
+import { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration'
 import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js'
 import _ from 'underscore'
+import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 
 export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel {
 	/**
@@ -13,6 +14,8 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel
 	 */
 	PieceInstanceImpl: PieceInstance
 
+	updatedExpectedPackages: Map<ExpectedPackageId, ExpectedPackage.Base> | null
+
 	/**
 	 * Set/delete a value for this PieceInstance, and track that there are changes
 	 * @param key Property key
@@ -26,6 +29,16 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel
 		}
 
 		this.#hasChanges = true
+
+		// Updating the 'piece' has side effects on the expectedPackages
+		if (key === 'piece') {
+			const newPiece = newValue as PieceInstance['piece'] | undefined
+			this.updatedExpectedPackages = createExpectedPackagesMap(
+				this.PieceInstanceImpl.rundownId,
+				newPiece?.expectedPackages
+			)
+			this.PieceInstanceImpl.neededExpectedPackageIds = Array.from(this.updatedExpectedPackages.keys())
+		}
 	}
 
 	/**
@@ -57,7 +70,7 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel
 	 * Whether this PieceInstance has unsaved changes
 	 */
 	get HasChanges(): boolean {
-		return this.#hasChanges
+		return this.#hasChanges || !!this.updatedExpectedPackages
 	}
 
 	/**
@@ -71,9 +84,19 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel
 		return this.PieceInstanceImpl
 	}
 
-	constructor(pieceInstances: PieceInstance, hasChanges: boolean) {
-		this.PieceInstanceImpl = pieceInstances
+	constructor(pieceInstance: PieceInstance, hasChanges: boolean) {
+		this.PieceInstanceImpl = pieceInstance
 		this.#hasChanges = hasChanges
+
+		if (hasChanges) {
+			this.updatedExpectedPackages = createExpectedPackagesMap(
+				pieceInstance.rundownId,
+				pieceInstance.piece.expectedPackages
+			)
+			this.PieceInstanceImpl.neededExpectedPackageIds = Array.from(this.updatedExpectedPackages.keys())
+		} else {
+			this.updatedExpectedPackages = null
+		}
 	}
 
 	/**
@@ -137,3 +160,16 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel
 		)
 	}
 }
+
+function createExpectedPackagesMap(
+	rundownId: RundownId,
+	packages: ExpectedPackage.Base[] | undefined
+): Map<ExpectedPackageId, ExpectedPackage.Base> {
+	const map = new Map<ExpectedPackageId, ExpectedPackage.Base>()
+	if (!packages) return map
+
+	for (const pkg of packages) {
+		map.set(getExpectedPackageId(rundownId, pkg), pkg)
+	}
+	return map
+}
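getExpectedPackageId(rundownId, pkg) appears to derive the id from the owning scope plus the package itself — the snapshot code later in this patch regenerates ids the same way — which is what makes this map a deduplicator: two pieces carrying an identical package in the same rundown collapse onto one ExpectedPackageId. Illustrative only, under that assumption:

	const idA = getExpectedPackageId(rundownId, somePackage)
	const idB = getExpectedPackageId(rundownId, clone(somePackage))
	// idA === idB — same scope and same content map to one ExpectedPackages document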
diff --git a/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts b/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts
index e5e218c7b8..e43d55348c 100644
--- a/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts
+++ b/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts
@@ -1,4 +1,10 @@
-import { PartInstanceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	ExpectedPackageId,
+	PartInstanceId,
+	PieceInstanceId,
+	RundownId,
+	RundownPlaylistId,
+} from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance'
 import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
 import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment'
@@ -7,6 +13,11 @@ import { AnyBulkWriteOperation } from 'mongodb'
 import { JobContext } from '../../../jobs/index.js'
 import { PlayoutPartInstanceModelImpl } from './PlayoutPartInstanceModelImpl.js'
 import { PlayoutRundownModelImpl } from './PlayoutRundownModelImpl.js'
+import { ReadonlyDeep } from 'type-fest'
+import { ExpectedPackage } from '@sofie-automation/blueprints-integration'
+import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib'
+import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio'
 
 /**
  * Save any changed AdlibTesting Segments
@@ -136,3 +147,143 @@ export function writePartInstancesAndPieceInstances(
 			: Promise.resolve(),
 	]
 }
+
+interface ExpectedPackageEntry {
+	_id: ExpectedPackageId
+	package: ReadonlyDeep<ExpectedPackage.Any>
+
+	pieceInstanceIds: PieceInstanceId[]
+}
+
+export async function writeExpectedPackagesForPlayoutSources(
+	context: JobContext,
+	playlistId: RundownPlaylistId,
+	rundownId: RundownId,
+	partInstancesForRundown: PlayoutPartInstanceModelImpl[]
+): Promise<void> {
+	// We know we are inside the playout lock, so we can safely load the packages; they won't be modified by another thread
+
+	const existingPackages = (await context.directCollections.ExpectedPackages.findFetch(
+		{
+			studioId: context.studioId,
+			rundownId: rundownId,
+			bucketId: null,
+		},
+		{
+			projection: {
+				_id: 1,
+				playoutSources: 1,
+			},
+		}
+	)) as Pick<ExpectedPackageDB, '_id' | 'playoutSources'>[]
+	const existingPackagesMap = normalizeArrayToMap(existingPackages, '_id')
+
+	const pieceInstancesToAddToPackages = new Map<ExpectedPackageId, PieceInstanceId[]>()
+	const packagesToInsert = new Map<ExpectedPackageId, ExpectedPackageEntry>()
+
+	let hasPieceInstanceExpectedPackageChanges = false
+
+	for (const partInstance of partInstancesForRundown) {
+		if (!partInstance) continue
+
+		for (const pieceInstance of partInstance.pieceInstancesImpl.values()) {
+			if (!pieceInstance) {
+				// PieceInstance was deleted, cleanup may be needed
+				hasPieceInstanceExpectedPackageChanges = true
+				continue
+			}
+
+			// The expectedPackages of the PieceInstance have not been modified, so there is nothing to do
+			if (!pieceInstance.updatedExpectedPackages) continue
+
+			hasPieceInstanceExpectedPackageChanges = true
+
+			// Any removed references will be removed by the debounced job
+
+			for (const [packageId, expectedPackage] of pieceInstance.updatedExpectedPackages) {
+				const existingPackage = existingPackagesMap.get(packageId)
+				if (existingPackage?.playoutSources.pieceInstanceIds.includes(pieceInstance.pieceInstance._id)) {
+					// Reference already exists, nothing to do
+					continue
+				}
+
+				if (existingPackage) {
+					// Add the pieceInstanceId to the existing package
+					const pieceInstanceIds = pieceInstancesToAddToPackages.get(packageId) ?? []
+					pieceInstanceIds.push(pieceInstance.pieceInstance._id)
+					pieceInstancesToAddToPackages.set(packageId, pieceInstanceIds)
+				} else {
+					// Record as needing a new document, or add to existing entry if already queued for insert
+					const existingEntry = packagesToInsert.get(packageId)
+					if (existingEntry) {
+						existingEntry.pieceInstanceIds.push(pieceInstance.pieceInstance._id)
+					} else {
+						packagesToInsert.set(packageId, {
+							_id: packageId,
+							package: expectedPackage,
+							pieceInstanceIds: [pieceInstance.pieceInstance._id],
+						})
+					}
+
+					// Future: If this came from a bucket, can we copy the packageInfos across to minimise latency until the status is ready?
+				}
+			}
+		}
+	}
+
+	// We now know what needs to be written (only the additive changes)
+
+	const writeOps: AnyBulkWriteOperation[] = []
+	for (const [packageId, pieceInstanceIds] of pieceInstancesToAddToPackages.entries()) {
+		writeOps.push({
+			updateOne: {
+				filter: { _id: packageId },
+				update: {
+					$addToSet: {
+						'playoutSources.pieceInstanceIds': { $each: pieceInstanceIds },
+					},
+				},
+			},
+		})
+	}
+
+	for (const packageEntry of packagesToInsert.values()) {
+		writeOps.push({
+			insertOne: {
+				document: {
+					_id: packageEntry._id,
+					studioId: context.studioId,
+					rundownId: rundownId,
+					bucketId: null,
+					created: Date.now(),
+
+					package: packageEntry.package,
+					ingestSources: [],
+					playoutSources: {
+						pieceInstanceIds: packageEntry.pieceInstanceIds,
+					},
+				},
+			},
+		})
+	}
+
+	if (writeOps.length > 0) {
+		await context.directCollections.ExpectedPackages.bulkWrite(writeOps)
+	}
+
+	// We can't easily track any references which have been deleted, so we should schedule a cleanup job to deal with that for us
+	// Only queue if there were changes to expected packages, to avoid unnecessary job scheduling
+	if (hasPieceInstanceExpectedPackageChanges) {
+		await context.queueStudioJob(
+			StudioJobs.CleanupOrphanedExpectedPackageReferences,
+			{
+				playlistId: playlistId,
+				rundownId: rundownId,
+			},
+			{
+				lowPriority: true,
+				debounce: 1000,
+			}
+		)
+	}
+}
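The additive path leans on MongoDB's $addToSet semantics, so re-saving the same PieceInstance is idempotent, while removals are deliberately left to the debounced cleanup job queued at the end of the function. For example, if pieceInstanceIds is currently ['a'], then this update (values invented):

	const update = {
		$addToSet: { 'playoutSources.pieceInstanceIds': { $each: ['a', 'b'] } },
	}
	// leaves the array as ['a', 'b'] — the existing 'a' is not duplicated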
diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts
index 94eb084684..497ab26123 100644
--- a/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts
+++ b/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts
@@ -4,12 +4,23 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString'
 import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
 import { PlayoutRundownModelImpl } from '../PlayoutRundownModelImpl.js'
 import { setupDefaultJobEnvironment } from '../../../../__mocks__/context.js'
-import { writePartInstancesAndPieceInstances, writeAdlibTestingSegments } from '../SavePlayoutModel.js'
+import {
+	writePartInstancesAndPieceInstances,
+	writeAdlibTestingSegments,
+	writeExpectedPackagesForPlayoutSources,
+} from '../SavePlayoutModel.js'
 import { PlayoutPartInstanceModelImpl } from '../PlayoutPartInstanceModelImpl.js'
-import { PartInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	PartInstanceId,
+	PieceInstanceId,
+	RundownId,
+	RundownPlaylistId,
+} from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
 import { mock } from 'jest-mock-extended'
 import { QuickLoopService } from '../../services/QuickLoopService.js'
+import { ExpectedPackageDB, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { ExpectedPackage } from '@sofie-automation/blueprints-integration'
 
 describe('SavePlayoutModel', () => {
 	function createRundownModel(segments?: DBSegment[]): PlayoutRundownModelImpl {
@@ -411,4 +422,264 @@ describe('SavePlayoutModel', () => {
 		`)
 		})
 	})
+
+	describe('writeExpectedPackagesForPlayoutSources', () => {
+		const rundownId = protectString<RundownId>('rundown0')
+		const playlistId = protectString<RundownPlaylistId>('playlist0')
+
+		function createMockExpectedPackage(id: string): ExpectedPackage.ExpectedPackageMediaFile {
+			return {
+				_id: id,
+				type: ExpectedPackage.PackageType.MEDIA_FILE,
+				layers: ['layer0'],
+				content: { filePath: `/media/${id}.mp4` },
+				version: {},
+				contentVersionHash: `hash_${id}`,
+				sources: [],
+				sideEffect: {},
+			}
+		}
+
+		function createPartInstanceWithPieceInstances(
+			partInstanceId: string,
+			pieceInstances: PieceInstance[],
+			hasExpectedPackageChanges: boolean
+		): PlayoutPartInstanceModelImpl {
+			const partInstanceModel = new PlayoutPartInstanceModelImpl(
+				{ _id: partInstanceId, rundownId } as any,
+				pieceInstances,
+				hasExpectedPackageChanges,
+				mock<QuickLoopService>()
+			)
+			return partInstanceModel
+		}
+
+		function createPieceInstanceWithExpectedPackages(
+			pieceInstanceId: string,
+			expectedPackages: ExpectedPackage.Base[]
+		): PieceInstance {
+			return {
+				_id: protectString(pieceInstanceId),
+				rundownId: rundownId,
+				partInstanceId: protectString('partInstance0'),
+				piece: {
+					_id: protectString(`piece_${pieceInstanceId}`),
+					expectedPackages,
+				},
+			} as unknown as PieceInstance
+		}
+
+		it('no PartInstances', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [])
+
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+		})
+
+		it('PieceInstance with no expected package changes', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg = createMockExpectedPackage('pkg0')
+			const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg])
+			// hasExpectedPackageChanges = false means no updatedExpectedPackages will be set
+			const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], false)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+		})
+
+		it('inserts new ExpectedPackage when PieceInstance has expected packages', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg = createMockExpectedPackage('pkg0')
+			const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg])
+			// hasExpectedPackageChanges = true sets up updatedExpectedPackages
+			const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			// Should have findFetch, bulkWrite, and insertOne (bulkWrite logs itself then calls insertOne which also logs)
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+			expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+			expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // 1 operation
+			expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+
+			// Verify the inserted package has correct structure
+			const insertedPackageId = context.mockCollections.ExpectedPackages.operations[2].args[0]
+			const insertedPackage = await context.directCollections.ExpectedPackages.findOne(insertedPackageId)
+			expect(insertedPackage).toMatchObject({
+				_id: insertedPackageId,
+				studioId: context.studioId,
+				rundownId: rundownId,
+				bucketId: null,
+				package: expectedPkg,
+				ingestSources: [],
+				playoutSources: {
+					pieceInstanceIds: [protectString('pieceInstance0')],
+				},
+			} satisfies Omit<ExpectedPackageDB, 'created'>)
+			expect(insertedPackage?.created).toBeGreaterThan(0)
+		})
+
+		it('does not add pieceInstanceId if reference already exists in package', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg = createMockExpectedPackage('pkg0')
+			const packageId = getExpectedPackageId(rundownId, expectedPkg)
+			const pieceInstanceId = protectString<PieceInstanceId>('pieceInstance0')
+
+			// Pre-populate with package that already has this pieceInstanceId
+			const existingPackage: ExpectedPackageDB = {
+				_id: packageId,
+				studioId: context.studioId,
+				rundownId: rundownId,
+				bucketId: null,
+				created: Date.now(),
+				package: expectedPkg,
+				ingestSources: [],
+				playoutSources: {
+					pieceInstanceIds: [pieceInstanceId],
+				},
+			}
+			await context.directCollections.ExpectedPackages.insertOne(existingPackage)
+			context.mockCollections.ExpectedPackages.clearOpLog()
+
+			const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg])
+			const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			// Should only have findFetch, no bulkWrite since reference already exists
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+		})
+
+		it('handles multiple PieceInstances with different packages', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg1 = createMockExpectedPackage('pkg1')
+			const expectedPkg2 = createMockExpectedPackage('pkg2')
+
+			const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg1])
+			const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg2])
+
+			const partInstance = createPartInstanceWithPieceInstances(
+				'partInstance0',
+				[pieceInstance1, pieceInstance2],
+				true
+			)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			// Should have findFetch, bulkWrite, and 2 insertOne ops
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(4)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+			expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+			expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(2) // 2 operations
+			expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+			expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne')
+		})
+
+		it('handles multiple PieceInstances referencing the same package', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg = createMockExpectedPackage('sharedPkg')
+
+			const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg])
+			const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg])
+
+			const partInstance = createPartInstanceWithPieceInstances(
+				'partInstance0',
+				[pieceInstance1, pieceInstance2],
+				true
+			)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			// Should have findFetch, bulkWrite, and insertOne
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+			expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+			expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // Only 1 insert for the shared package
+			expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+
+			// Verify the package has both pieceInstanceIds
+			const insertedPackageId = context.mockCollections.ExpectedPackages.operations[2].args[0]
+			const insertedPackage = await context.directCollections.ExpectedPackages.findOne(insertedPackageId)
+			expect(insertedPackage?.playoutSources.pieceInstanceIds).toHaveLength(2)
+			expect(insertedPackage?.playoutSources.pieceInstanceIds).toContain(
+				protectString<PieceInstanceId>('pieceInstance1')
+			)
+			expect(insertedPackage?.playoutSources.pieceInstanceIds).toContain(
+				protectString<PieceInstanceId>('pieceInstance2')
+			)
+		})
+
+		it('handles multiple PartInstances', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg1 = createMockExpectedPackage('pkg1')
+			const expectedPkg2 = createMockExpectedPackage('pkg2')
+
+			const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg1])
+			const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg2])
+
+			const partInstance1 = createPartInstanceWithPieceInstances('partInstance1', [pieceInstance1], true)
+			const partInstance2 = createPartInstanceWithPieceInstances('partInstance2', [pieceInstance2], true)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance1, partInstance2])
+
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(4)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+			expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+			expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(2) // 2 insert operations
+			expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+			expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne')
+		})
+
+		it('handles deleted PieceInstance triggering cleanup job', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const partInstance = createPartInstanceWithPieceInstances('partInstance0', [], false)
+			// Simulate a deleted pieceInstance by setting null in the map
+			partInstance.pieceInstancesImpl.set(protectString('deletedPiece'), null)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			// No writes expected since there are no packages to insert/update
+			// But the cleanup job should still be queued (which the mock handles silently)
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+		})
+
+		it('handles PieceInstance with multiple expected packages', async () => {
+			const context = setupDefaultJobEnvironment()
+
+			const expectedPkg1 = createMockExpectedPackage('pkg1')
+			const expectedPkg2 = createMockExpectedPackage('pkg2')
+			const expectedPkg3 = createMockExpectedPackage('pkg3')
+
+			const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [
+				expectedPkg1,
+				expectedPkg2,
+				expectedPkg3,
+			])
+			const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true)
+
+			await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance])
+
+			expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(5)
+			expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+			expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+			expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(3) // 3 insert operations
+			expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+			expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne')
+			expect(context.mockCollections.ExpectedPackages.operations[4].type).toBe('insertOne')
+		})
+	})
 })
diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts
index c173f2cb29..95d9cb3b4d 100644
--- a/packages/job-worker/src/playout/snapshot.ts
+++ b/packages/job-worker/src/playout/snapshot.ts
@@ -1,4 +1,9 @@
-import { ExpectedPackageDBType, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import {
+	ExpectedPackageDB,
+	ExpectedPackageDBType,
+	ExpectedPackageIngestSource,
+	getExpectedPackageId,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import {
 	AdLibActionId,
 	ExpectedPackageId,
@@ -25,12 +30,14 @@ import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snap
 import { unprotectString, ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString'
 import { saveIntoDb } from '../db/changes.js'
 import { getPartId, getSegmentId } from '../ingest/lib.js'
-import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib'
+import { assertNever, getHash, getRandomId, literal, omit } from '@sofie-automation/corelib/dist/lib'
 import { logger } from '../logging.js'
 import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob'
 import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
 import { RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown'
 import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52'
+import { ExpectedPackage } from '@sofie-automation/blueprints-integration'
 
 class IdMapWithGenerator<V extends ProtectedString<any>> extends Map<V, V> {
 	getOrGenerate(key: V): V {
@@ -243,7 +250,7 @@ export async function handleRestorePlaylistSnapshot(
 	}
 
 	// List any ids that need updating on other documents
-	const rundownIdMap = new Map()
+	const rundownIdMap = new IdMapWithGenerator<RundownId>()
 	const getNewRundownId = (oldRundownId: RundownId) => {
 		const rundownId = rundownIdMap.get(oldRundownId)
 		if (!rundownId) {
@@ -340,47 +347,219 @@ export async function handleRestorePlaylistSnapshot(
 	)
 
 	const expectedPackageIdMap = new Map()
-	for (const expectedPackage of snapshot.expectedPackages) {
-		const oldId = expectedPackage._id
-
-		switch (expectedPackage.fromPieceType) {
-			case ExpectedPackageDBType.PIECE:
-			case ExpectedPackageDBType.ADLIB_PIECE:
-			case ExpectedPackageDBType.ADLIB_ACTION:
-			case ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
-			case ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
-			case ExpectedPackageDBType.BASELINE_PIECE: {
-				expectedPackage.pieceId = pieceIdMap.getOrGenerateAndWarn(
-					expectedPackage.pieceId,
-					`expectedPackage.pieceId=${expectedPackage.pieceId}`
-				)
-
-				expectedPackage._id = getExpectedPackageId(expectedPackage.pieceId, expectedPackage.blueprintPackageId)
-
-				break
+	snapshot.expectedPackages = snapshot.expectedPackages.map((expectedPackage0): ExpectedPackageDB => {
+		if ('fromPieceType' in expectedPackage0) {
+			const expectedPackage = expectedPackage0 as unknown as PackagesPreR53.ExpectedPackageDB
+
+			let source: ExpectedPackageIngestSource | undefined
+
+			switch (expectedPackage.fromPieceType) {
+				case PackagesPreR53.ExpectedPackageDBType.PIECE:
+				case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE:
+				case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION:
+					source = {
+						fromPieceType: expectedPackage.fromPieceType,
+						pieceId: pieceIdMap.getOrGenerateAndWarn(
+							expectedPackage.pieceId,
+							`expectedPackage.pieceId=${expectedPackage.pieceId}`
+						) as any,
+						partId: partIdMap.getOrGenerateAndWarn(
+							expectedPackage.partId,
+							`expectedPackage.partId=${expectedPackage.partId}`
+						),
+						segmentId: segmentIdMap.getOrGenerateAndWarn(
+							expectedPackage.segmentId,
+							`expectedPackage.segmentId=${expectedPackage.segmentId}`
+						),
+						blueprintPackageId: expectedPackage.blueprintPackageId,
+						listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+					}
+
+					break
+				case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+				case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION: {
+					source = {
+						fromPieceType: expectedPackage.fromPieceType,
+						pieceId: pieceIdMap.getOrGenerateAndWarn(
+							expectedPackage.pieceId,
+							`expectedPackage.pieceId=${expectedPackage.pieceId}`
+						) as any,
+						blueprintPackageId: expectedPackage.blueprintPackageId,
+						listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+					}
+
+					break
+				}
+
+				case PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: {
+					source = {
+						fromPieceType: expectedPackage.fromPieceType,
+						blueprintPackageId: expectedPackage.blueprintPackageId,
+						listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+					}
+					break
+				}
+				case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB:
+				case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+				case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: {
+					// ignore, these are not present in the rundown snapshot anyway.
+					logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`)
+					break
+				}
+
+				default:
+					assertNever(expectedPackage)
+					break
+			}
+
+			if (!source) {
+				logger.warn(`Failed to fixup ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`)
+				// Define a fake source, so that it gets imported.
+				source = {
+					fromPieceType: ExpectedPackageDBType.PIECE,
+					pieceId: protectString('fakePiece'),
+					partId: protectString('fakePart'),
+					segmentId: protectString('fakeSegment'),
+					blueprintPackageId: expectedPackage.blueprintPackageId,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				}
+			}
+
+			const packageRundownId: RundownId | null =
+				'rundownId' in expectedPackage
+					? rundownIdMap.getOrGenerateAndWarn(
+							expectedPackage.rundownId,
+							`expectedPackage.rundownId=${expectedPackage.rundownId}`
+					  )
+					: null
+
+			// Generate a unique id for the package.
+			// This is done differently to ensure we don't have id collisions that the documents aren't expecting
+			// Note: maybe this should do the work to generate in the new deduplicated form, but that likely has no benefit
+			let packageOwnerId: string
+			const ownerPieceType = source.fromPieceType
+			switch (source.fromPieceType) {
+				case ExpectedPackageDBType.PIECE:
+				case ExpectedPackageDBType.ADLIB_PIECE:
+				case ExpectedPackageDBType.ADLIB_ACTION:
+				case ExpectedPackageDBType.BASELINE_PIECE:
+				case ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+				case ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
+					packageOwnerId = unprotectString(source.pieceId)
+					break
+				case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS:
+					packageOwnerId = 'rundownBaselineObjects'
+					break
+				case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
+					packageOwnerId = 'studioBaseline'
+					break
+				case ExpectedPackageDBType.BUCKET_ADLIB:
+				case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+					packageOwnerId = unprotectString(source.pieceId)
+					break
+
+				default:
+					assertNever(source)
+					throw new Error(`Unknown fromPieceType "${ownerPieceType}"`)
 			}
-			case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: {
-				expectedPackage._id = getExpectedPackageId(
-					expectedPackage.rundownId,
+			const newPackageId = protectString(
+				`${packageRundownId || context.studioId}_${packageOwnerId}_${getHash(
 					expectedPackage.blueprintPackageId
-				)
-				break
+				)}`
+			)
+
+			const newExpectedPackage: ExpectedPackageDB = {
+				_id: newPackageId,
+				studioId: context.studioId,
+				rundownId: packageRundownId,
+				bucketId: null,
+				created: expectedPackage.created,
+				package: {
+					...(omit(
+						expectedPackage,
+						'_id',
+						'studioId',
+						'fromPieceType',
+						'blueprintPackageId',
+						'contentVersionHash',
+						// @ts-expect-error only sometimes present
+						'rundownId',
+						'pieceId',
+						'partId',
+						'segmentId',
+						'pieceExternalId'
+					) as ExpectedPackage.Any),
+					_id: expectedPackage.blueprintPackageId,
+				},
+
+				ingestSources: [source],
+				playoutSources: {
+					pieceInstanceIds: [],
+				},
 			}
-			case ExpectedPackageDBType.BUCKET_ADLIB:
-			case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
-			case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: {
-				// ignore, these are not present in the rundown snapshot anyway.
-				logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`)
-				break
+
+			expectedPackageIdMap.set(expectedPackage._id, newExpectedPackage._id)
+			return newExpectedPackage
+		} else {
+			const expectedPackage = expectedPackage0
+			const oldId = expectedPackage._id
+
+			for (const source of expectedPackage.ingestSources) {
+				switch (source.fromPieceType) {
+					case ExpectedPackageDBType.PIECE:
+					case ExpectedPackageDBType.ADLIB_PIECE:
+					case ExpectedPackageDBType.ADLIB_ACTION:
+						source.pieceId = pieceIdMap.getOrGenerateAndWarn(
+							source.pieceId,
+							`expectedPackage.pieceId=${source.pieceId}`
+						) as any
+						source.partId = partIdMap.getOrGenerateAndWarn(
+							source.partId,
+							`expectedPackage.partId=${source.partId}`
+						)
+						source.segmentId = segmentIdMap.getOrGenerateAndWarn(
+							source.segmentId,
+							`expectedPackage.segmentId=${source.segmentId}`
+						)
+
+						break
+					case ExpectedPackageDBType.BASELINE_PIECE:
+					case ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+					case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: {
+						source.pieceId = pieceIdMap.getOrGenerateAndWarn(
+							source.pieceId,
+							`expectedPackage.pieceId=${source.pieceId}`
+						) as any
+
+						break
+					}
+					case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: {
+						// No properties to update
+						break
+					}
+					case ExpectedPackageDBType.BUCKET_ADLIB:
+					case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+					case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: {
+						// ignore, these are not present in the rundown snapshot anyway.
+						logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`)
+						break
+					}
+					default:
+						assertNever(source)
+						break
+				}
 			}
-			default:
-				assertNever(expectedPackage)
-				break
-		}
+			// Regenerate the ID from the new rundownId and packageId
+			expectedPackage._id = getExpectedPackageId(
+				expectedPackage.rundownId || expectedPackage.studioId,
+				expectedPackage.package
			)
 
-		expectedPackageIdMap.set(oldId, expectedPackage._id)
-	}
+			expectedPackageIdMap.set(oldId, expectedPackage._id)
+			return expectedPackage
+		}
+	})
 
 	snapshot.playlist.rundownIdsInOrder = snapshot.playlist.rundownIdsInOrder.map((id) => rundownIdMap.get(id) ?? id)
("${takePartInstance.partInstance.rundownId}")`) + if (takePartInstance.partInstance.invalidReason) { + throw UserError.create(UserErrorMessage.TakePartInstanceInvalid) + } + const showStyle = await pShowStyle const blueprint = await context.getShowStyleBlueprint(showStyle._id) diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 960bf02f5b..44acd584a4 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -91,7 +91,7 @@ export async function updateStudioTimeline( const studioBlueprint = context.studioBlueprint if (studioBlueprint) { - const watchedPackages = await WatchedPackagesHelper.create(context, { + const watchedPackages = await WatchedPackagesHelper.create(context, null, null, { fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, }) diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index 99336b67c6..e8d4908465 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -1,16 +1,24 @@ import { JobContext } from '../../jobs/index.js' import { ExpectedPackageDB, - ExpectedPackageDBFromStudioBaselineObjects, ExpectedPackageDBType, + ExpectedPackageIngestSourceStudioBaseline, + getExpectedPackageId, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { saveIntoDb } from '../../db/changes.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js' +import { sanitiseExpectedPackages } from '../../ingest/expectedPackages.js' +import { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { Complete } from '@sofie-automation/corelib/dist/lib' export class StudioBaselineHelper { readonly #context: JobContext - #pendingExpectedPackages: ExpectedPackageDBFromStudioBaselineObjects[] | undefined + #pendingExpectedPackages: + | Map> + | undefined #pendingExpectedPlayoutItems: ExpectedPlayoutItemStudio[] | undefined constructor(context: JobContext) { @@ -21,8 +29,26 @@ export class StudioBaselineHelper { return !!this.#pendingExpectedPackages || !!this.#pendingExpectedPlayoutItems } - setExpectedPackages(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { - this.#pendingExpectedPackages = packages + setExpectedPackages(packages: ExpectedPackage.Any[]): void { + sanitiseExpectedPackages(packages) + + // Using a map here is a bit excessive, but it makes it easier to remove duplicates + this.#pendingExpectedPackages = new Map() + for (const expectedPackage of packages) { + const id = getExpectedPackageId(this.#context.studioId, expectedPackage) + + this.#pendingExpectedPackages.set(id, { + packageId: id, + + package: expectedPackage, + + source: { + fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, + blueprintPackageId: expectedPackage._id, + listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates, + }, + } satisfies IngestExpectedPackage) + } } setExpectedPlayoutItems(playoutItems: ExpectedPlayoutItemStudio[]): void { this.#pendingExpectedPlayoutItems = playoutItems @@ -39,14 +65,39 @@ export class StudioBaselineHelper { ) : undefined, this.#pendingExpectedPackages - ? 
saveIntoDb( + ? // We can be naive here, as we know the packages will have only one owner (the studio baseline) + saveIntoDb( this.#context, this.#context.directCollections.ExpectedPackages, { studioId: this.#context.studioId, - fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, + rundownId: null, + bucketId: null, }, - this.#pendingExpectedPackages + Array.from(this.#pendingExpectedPackages.values()).map( + (pkg) => + ({ + _id: pkg.packageId, + studioId: this.#context.studioId, + rundownId: null, + bucketId: null, + + created: Date.now(), + package: pkg.package, + ingestSources: [pkg.source], + playoutSources: { + // This doesn't belong to a rundown, so can't be referenced by playout + pieceInstanceIds: [], + }, + }) satisfies Complete + ), + { + beforeDiff: (doc, oldDoc) => { + // Preserve the created date + doc.created = oldDoc.created + return doc + }, + } ) : undefined, ]) diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts index fefc0e8914..33145c4e6e 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts @@ -1,15 +1,15 @@ -import { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { +import type { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' +import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import type { TimelineComplete, TimelineCompleteGenerationVersions, TimelineObjGeneric, } from '@sofie-automation/corelib/dist/dataModel/Timeline' -import { BaseModel } from '../../modelBase.js' -import { ReadonlyDeep } from 'type-fest' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' +import type { BaseModel } from '../../modelBase.js' +import type { ReadonlyDeep } from 'type-fest' +import type { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' +import type { ExpectedPackage } from '@sofie-automation/blueprints-integration' export interface StudioPlayoutModelBaseReadonly { /** @@ -35,7 +35,7 @@ export interface StudioPlayoutModelBase extends StudioPlayoutModelBaseReadonly { * Update the ExpectedPackages for the StudioBaseline of the current Studio * @param packages ExpectedPackages to store */ - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void /** * Update the ExpectedPlayoutItems for the StudioBaseline of the current Studio * @param playoutItems ExpectedPlayoutItems to store diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 50f9cb291a..55a8e97808 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -15,9 +15,9 @@ import { IS_PRODUCTION } from '../../environment.js' import { logger } from 
'../../logging.js' import { StudioPlayoutModel } from './StudioPlayoutModel.js' import { DatabasePersistedModel } from '../../modelBase.js' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { StudioBaselineHelper } from './StudioBaselineHelper.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' /** * This is a model used for studio operations. @@ -82,7 +82,7 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { return this.context.studio.settings.multiGatewayNowSafeLatency } - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void { this.#baselineHelper.setExpectedPackages(packages) } setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void { diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts index 08d6a7dc82..d47dd309c0 100644 --- a/packages/job-worker/src/workers/context/JobContextImpl.ts +++ b/packages/job-worker/src/workers/context/JobContextImpl.ts @@ -1,5 +1,5 @@ import { IDirectCollections } from '../../db/index.js' -import { JobContext, JobStudio } from '../../jobs/index.js' +import { JobContext, JobStudio, QueueJobOptions } from '../../jobs/index.js' import { WorkerDataCache } from '../caches.js' import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' @@ -137,13 +137,17 @@ export class JobContextImpl extends StudioCacheContextImpl implements JobContext } async queueIngestJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getIngestQueueName(this.studioId), name, data) + await this.queueJob(getIngestQueueName(this.studioId), name, data, undefined) } - async queueStudioJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getStudioQueueName(this.studioId), name, data) + async queueStudioJob( + name: T, + data: Parameters[0], + options?: QueueJobOptions + ): Promise { + await this.queueJob(getStudioQueueName(this.studioId), name, data, options) } async queueEventJob(name: T, data: Parameters[0]): Promise { - await this.queueJob(getEventsQueueName(this.studioId), name, data) + await this.queueJob(getEventsQueueName(this.studioId), name, data, undefined) } hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void { diff --git a/packages/job-worker/src/workers/context/util.ts b/packages/job-worker/src/workers/context/util.ts index 38ac084220..3320453677 100644 --- a/packages/job-worker/src/workers/context/util.ts +++ b/packages/job-worker/src/workers/context/util.ts @@ -1 +1,8 @@ -export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise +import type { QueueJobOptions } from '../../jobs/index.js' + +export type QueueJobFunc = ( + queueName: string, + jobName: string, + jobData: unknown, + options: QueueJobOptions | undefined +) => Promise diff --git a/packages/job-worker/src/workers/ingest/jobs.ts b/packages/job-worker/src/workers/ingest/jobs.ts index b8cdbb0779..2bc85736ca 100644 --- a/packages/job-worker/src/workers/ingest/jobs.ts +++ b/packages/job-worker/src/workers/ingest/jobs.ts @@ -29,7 +29,7 @@ import { 
handleUpdatedSegment, handleUpdatedSegmentRanks, } from '../../ingest/ingestSegmentJobs.js' -import { handleExpectedPackagesRegenerate, handleUpdatedPackageInfoForRundown } from '../../ingest/packageInfo.js' +import { handleUpdatedPackageInfoForRundown } from '../../ingest/packageInfo.js' import { handleBucketActionModify, handleBucketActionRegenerateExpectedPackages, @@ -81,7 +81,6 @@ export const ingestJobHandlers: IngestJobHandlers = { [IngestJobs.MosMoveStory]: wrapMosIngestJob(handleMosMoveStories), [IngestJobs.MosSwapStory]: wrapMosIngestJob(handleMosSwapStories), - [IngestJobs.ExpectedPackagesRegenerate]: handleExpectedPackagesRegenerate, [IngestJobs.PackageInfosUpdatedRundown]: handleUpdatedPackageInfoForRundown, [IngestJobs.UserRemoveRundown]: handleUserRemoveRundown, diff --git a/packages/job-worker/src/workers/studio/jobs.ts b/packages/job-worker/src/workers/studio/jobs.ts index 4e08fd7edb..be5d81787d 100644 --- a/packages/job-worker/src/workers/studio/jobs.ts +++ b/packages/job-worker/src/workers/studio/jobs.ts @@ -48,6 +48,7 @@ import { handleClearQuickLoopMarkers, handleSetQuickLoopMarker } from '../../pla import { handleActivateAdlibTesting } from '../../playout/adlibTesting.js' import { handleExecuteBucketAdLibOrAction } from '../../playout/bucketAdlibJobs.js' import { handleSwitchRouteSet } from '../../studio/routeSet.js' +import { handleCleanupOrphanedExpectedPackageReferences } from '../../playout/expectedPackages.js' type ExecutableFunction = ( context: JobContext, @@ -110,4 +111,6 @@ export const studioJobHandlers: StudioJobHandlers = { [StudioJobs.ClearQuickLoopMarkers]: handleClearQuickLoopMarkers, [StudioJobs.SwitchRouteSet]: handleSwitchRouteSet, + + [StudioJobs.CleanupOrphanedExpectedPackageReferences]: handleCleanupOrphanedExpectedPackageReferences, } diff --git a/packages/job-worker/tsconfig.build.json b/packages/job-worker/tsconfig.build.json index e39fb66951..7b0825b3e3 100755 --- a/packages/job-worker/tsconfig.build.json +++ b/packages/job-worker/tsconfig.build.json @@ -1,9 +1,10 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], @@ -14,6 +15,13 @@ "resolveJsonModule": true, "types": ["node"], "skipLibCheck": true, - "esModuleInterop": true - } + "esModuleInterop": true, + "composite": true + }, + "references": [ + // + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../blueprints-integration/tsconfig.build.json" }, + { "path": "../corelib/tsconfig.build.json" } + ] } diff --git a/packages/live-status-gateway-api/package.json b/packages/live-status-gateway-api/package.json index 7ad9b78d77..9e7064204b 100644 --- a/packages/live-status-gateway-api/package.json +++ b/packages/live-status-gateway-api/package.json @@ -15,8 +15,7 @@ }, "homepage": "https://github.com/nrkno/sofie-core/blob/master/packages/live-status-gateway-api#readme", "scripts": { - "build": "yarn generate-schema-types && run -T rimraf dist && run build:main", - "build:main": "run -T tsc -p tsconfig.build.json", + "build:prepare": "run generate-schema-types", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/live-status-gateway-api/tsconfig.build.json 
b/packages/live-status-gateway-api/tsconfig.build.json index 957dfc4586..f42f059c43 100644 --- a/packages/live-status-gateway-api/tsconfig.build.json +++ b/packages/live-status-gateway-api/tsconfig.build.json @@ -1,16 +1,18 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "target": "es2019", "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], "@sofie-automation/live-status-gateway-api": ["./src/index.ts"] }, "resolveJsonModule": true, - "types": ["node"] + "types": ["node"], + "composite": true } } diff --git a/packages/live-status-gateway/package.json b/packages/live-status-gateway/package.json index e82e4d0b19..f20b973b74 100644 --- a/packages/live-status-gateway/package.json +++ b/packages/live-status-gateway/package.json @@ -15,10 +15,6 @@ "homepage": "https://github.com/Sofie-Automation/sofie-core/blob/main/packages/live-status-gateway#readme", "contributors": [], "scripts": { - "build": "run -T rimraf dist && run build:main", - "buildstart": "run build && run dev", - "buildinspect": "run build && run inspect", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint --ignore-pattern server", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/live-status-gateway/tsconfig.build.json b/packages/live-status-gateway/tsconfig.build.json index cf0800867a..a4dfc1e45c 100644 --- a/packages/live-status-gateway/tsconfig.build.json +++ b/packages/live-status-gateway/tsconfig.build.json @@ -1,9 +1,10 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.bin", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], @@ -13,5 +14,12 @@ "resolveJsonModule": true, "skipLibCheck": true, "esModuleInterop": true - } + }, + "references": [ + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../live-status-gateway-api/tsconfig.build.json" }, + { "path": "../blueprints-integration/tsconfig.build.json" }, + { "path": "../server-core-integration/tsconfig.build.json" }, + { "path": "../corelib/tsconfig.build.json" } + ] } diff --git a/packages/meteor-lib/package.json b/packages/meteor-lib/package.json index 3f4cfe41de..f80535e678 100644 --- a/packages/meteor-lib/package.json +++ b/packages/meteor-lib/package.json @@ -16,8 +16,6 @@ }, "homepage": "https://github.com/nrkno/sofie-core/blob/main/packages/corelib#readme", "scripts": { - "build": "run -T rimraf dist && run build:main", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/meteor-lib/src/collections/ExpectedPackages.ts b/packages/meteor-lib/src/collections/ExpectedPackages.ts index ca7d656a5c..5815971453 100644 --- a/packages/meteor-lib/src/collections/ExpectedPackages.ts +++ b/packages/meteor-lib/src/collections/ExpectedPackages.ts @@ -6,6 +6,7 @@ import { htmlTemplateGetSteps, htmlTemplateGetFileNamesFromSteps, } from '@sofie-automation/shared-lib/dist/package-manager/helpers' +import { ReadonlyDeep } from 'type-fest' 
export function getPreviewPackageSettings( expectedPackage: ExpectedPackage.Any @@ -55,7 +56,7 @@ export function getThumbnailPackageSettings( } } export function getSideEffect( - expectedPackage: ExpectedPackage.Base, + expectedPackage: ReadonlyDeep, studio: Pick ): ExpectedPackage.Base['sideEffect'] { return deepExtend( diff --git a/packages/meteor-lib/src/triggers/RundownViewEventBus.ts b/packages/meteor-lib/src/triggers/RundownViewEventBus.ts index 60b575aeba..5cb11ce75c 100644 --- a/packages/meteor-lib/src/triggers/RundownViewEventBus.ts +++ b/packages/meteor-lib/src/triggers/RundownViewEventBus.ts @@ -29,6 +29,7 @@ export enum RundownViewEvents { REVEAL_IN_SHELF = 'revealInShelf', SWITCH_SHELF_TAB = 'switchShelfTab', SHELF_STATE = 'shelfState', + EDIT_MODE = 'editMode', MINI_SHELF_QUEUE_ADLIB = 'miniShelfQueueAdLib', GO_TO_PART = 'goToPart', GO_TO_PART_INSTANCE = 'goToPartInstance', @@ -74,6 +75,10 @@ export interface ShelfStateEvent extends IEventContext { state: boolean | 'toggle' } +export interface EditModeEvent extends IEventContext { + state: boolean | 'toggle' +} + export interface MiniShelfQueueAdLibEvent extends IEventContext { forward: boolean } @@ -139,6 +144,7 @@ export interface RundownViewEventBusEvents { [RundownViewEvents.SEGMENT_ZOOM_ON]: [] [RundownViewEvents.SEGMENT_ZOOM_OFF]: [] [RundownViewEvents.SHELF_STATE]: [e: ShelfStateEvent] + [RundownViewEvents.EDIT_MODE]: [e: EditModeEvent] [RundownViewEvents.REVEAL_IN_SHELF]: [e: RevealInShelfEvent] [RundownViewEvents.SWITCH_SHELF_TAB]: [e: SwitchToShelfTabEvent] [RundownViewEvents.MINI_SHELF_QUEUE_ADLIB]: [e: MiniShelfQueueAdLibEvent] diff --git a/packages/meteor-lib/src/triggers/actionFactory.ts b/packages/meteor-lib/src/triggers/actionFactory.ts index e7410ce173..2f5edfeefb 100644 --- a/packages/meteor-lib/src/triggers/actionFactory.ts +++ b/packages/meteor-lib/src/triggers/actionFactory.ts @@ -286,6 +286,17 @@ function createShelfAction(_filterChain: IGUIContextFilterLink[], state: boolean } } +function createEditModeAction(_filterChain: IGUIContextFilterLink[], state: boolean | 'toggle'): ExecutableAction { + return { + action: ClientActions.editMode, + execute: () => { + RundownViewEventBus.emit(RundownViewEvents.EDIT_MODE, { + state, + }) + }, + } +} + function createMiniShelfQueueAdLibAction(_filterChain: IGUIContextFilterLink[], forward: boolean): ExecutableAction { return { action: ClientActions.miniShelfQueueAdLib, @@ -442,6 +453,8 @@ export function createAction( switch (action.action) { case ClientActions.shelf: return createShelfAction(action.filterChain, action.state) + case ClientActions.editMode: + return createEditModeAction(action.filterChain, action.state) case ClientActions.goToOnAirLine: return createGoToOnAirLineAction(action.filterChain) case ClientActions.rewindSegments: diff --git a/packages/meteor-lib/tsconfig.build.json b/packages/meteor-lib/tsconfig.build.json index 69627fd13d..27eaa1ca65 100755 --- a/packages/meteor-lib/tsconfig.build.json +++ b/packages/meteor-lib/tsconfig.build.json @@ -1,10 +1,11 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "target": "es2019", "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], @@ -12,6 +13,13 @@ }, "resolveJsonModule": true, "types": ["node"], - 
"esModuleInterop": true - } + "esModuleInterop": true, + "composite": true + }, + "references": [ + // + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../blueprints-integration/tsconfig.build.json" }, + { "path": "../corelib/tsconfig.build.json" } + ] } diff --git a/packages/mos-gateway/Dockerfile b/packages/mos-gateway/Dockerfile index 0d446cdddf..ee48042026 100644 --- a/packages/mos-gateway/Dockerfile +++ b/packages/mos-gateway/Dockerfile @@ -8,7 +8,7 @@ COPY . . RUN corepack enable RUN yarn install --immutable RUN yarn run pinst --disable -RUN yarn lerna run --scope \*\*/mos-gateway --include-dependencies --stream build +RUN yarn build:single mos-gateway/tsconfig.build.json RUN yarn workspaces focus mos-gateway --production # purge dev-dependencies # DEPLOY IMAGE diff --git a/packages/mos-gateway/package.json b/packages/mos-gateway/package.json index 8587371c25..ec8d5f79f1 100644 --- a/packages/mos-gateway/package.json +++ b/packages/mos-gateway/package.json @@ -26,10 +26,6 @@ } ], "scripts": { - "build": "run -T rimraf dist && run build:main", - "buildstart": "run build && run dev", - "buildinspect": "run build && run inspect", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "lint-fix": "run lint --fix", diff --git a/packages/mos-gateway/tsconfig.build.json b/packages/mos-gateway/tsconfig.build.json index 5b6439320c..67f2cc0cfe 100644 --- a/packages/mos-gateway/tsconfig.build.json +++ b/packages/mos-gateway/tsconfig.build.json @@ -1,7 +1,7 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.bin", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", "baseUrl": "./", @@ -13,5 +13,10 @@ "resolveJsonModule": true, "skipLibCheck": true, "esModuleInterop": true - } + }, + "references": [ + // + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../server-core-integration/tsconfig.build.json" } + ] } diff --git a/packages/openapi/tsconfig.build.json b/packages/openapi/tsconfig.build.json index 83ad5a8cb7..288791ae90 100644 --- a/packages/openapi/tsconfig.build.json +++ b/packages/openapi/tsconfig.build.json @@ -1,7 +1,7 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["client/ts/**/*.ts", "src/**/*.ts", "index.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", "baseUrl": "./", diff --git a/packages/package.json b/packages/package.json index c5c2ddff1d..f7cb9e8bb5 100644 --- a/packages/package.json +++ b/packages/package.json @@ -16,10 +16,13 @@ }, "scripts": { "prepare": "lerna run prepare", - "build": "lerna run build --ignore @sofie-automation/openapi", - "build:all": "lerna run build", - "build:try": "lerna run --no-bail build --ignore @sofie-automation/openapi || true", - "watch": "lerna run --parallel build:main --ignore @sofie-automation/openapi -- --watch --preserveWatchOutput", + "build": "run build:prepare && run build:main", + "build:prepare": "(rimraf */dist */*.tsbuildinfo || true) && lerna run build:prepare", + "build:main": "tsc --build tsconfig.build.json", + "build:all": "run build && lerna run build --scope @sofie-automation/openapi", + "build:try": "run build || true", 
+ "build:single": "run build:prepare && tsc --build", + "watch": "run build:main --watch", "stage-versions": "git add -u \"*/package.json\" \"*/CHANGELOG.md\" lerna.json yarn.lock", "set-version": "lerna version --exact --no-changelog --no-git-tag-version --no-push --yes", "set-version-and-commit": "lerna version --exact --no-changelog --no-changelog --no-commit-hooks --force-publish='*' --no-push --yes", diff --git a/packages/playout-gateway/Dockerfile b/packages/playout-gateway/Dockerfile index 108f589103..8153989fab 100644 --- a/packages/playout-gateway/Dockerfile +++ b/packages/playout-gateway/Dockerfile @@ -8,7 +8,7 @@ COPY . . RUN corepack enable RUN yarn install --immutable RUN yarn run pinst --disable -RUN yarn lerna run --scope \*\*/playout-gateway --include-dependencies --stream build +RUN yarn build:single playout-gateway/tsconfig.build.json RUN yarn workspaces focus playout-gateway --production # purge dev-dependencies # DEPLOY IMAGE diff --git a/packages/playout-gateway/package.json b/packages/playout-gateway/package.json index 3f54bb6864..c72dbe7821 100644 --- a/packages/playout-gateway/package.json +++ b/packages/playout-gateway/package.json @@ -20,10 +20,6 @@ }, "contributors": [], "scripts": { - "build": "run -T rimraf dist && run build:main", - "buildstart": "run build && run dev", - "buildinspect": "run build && run inspect", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/playout-gateway/src/tsrHandler.ts b/packages/playout-gateway/src/tsrHandler.ts index bb6402b465..6faab8bc61 100644 --- a/packages/playout-gateway/src/tsrHandler.ts +++ b/packages/playout-gateway/src/tsrHandler.ts @@ -262,6 +262,12 @@ export class TSRHandler { const device = this._coreTsrHandlers[id]?._device const name = `Device "${device?.deviceName ?? id}" (${device?.instanceId ?? 
'instance unknown'})` + if (!e || !('message' in e)) { + return { + message: name + ': ' + 'Unknown error: ' + JSON.stringify(e), + } + } + return { message: e.message && name + ': ' + e.message, name: e.name && name + ': ' + e.name, @@ -368,7 +374,7 @@ export class TSRHandler { this.tsr.connectionManager.on('connectionEvent:debug', (id, ...args) => { const device = this._coreTsrHandlers[id]?._device - if (!device?.debugLogging && !this._coreHandler.logDebug) { + if (!this._coreHandler.logDebug) { return } if (args.length === 0) { diff --git a/packages/playout-gateway/tsconfig.build.json b/packages/playout-gateway/tsconfig.build.json index d0f135508a..14692f9baf 100644 --- a/packages/playout-gateway/tsconfig.build.json +++ b/packages/playout-gateway/tsconfig.build.json @@ -1,7 +1,7 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.bin", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", "baseUrl": "./", @@ -12,5 +12,10 @@ // TSR throws some typings issues "skipLibCheck": true, "resolveJsonModule": true - } + }, + "references": [ + // + { "path": "../shared-lib/tsconfig.build.json" }, + { "path": "../server-core-integration/tsconfig.build.json" } + ] } diff --git a/packages/server-core-integration/package.json b/packages/server-core-integration/package.json index e4b3a7fff2..d474f6dd55 100644 --- a/packages/server-core-integration/package.json +++ b/packages/server-core-integration/package.json @@ -32,15 +32,13 @@ } ], "scripts": { - "build": "run -T rimraf dist && run build:main && run copytypes", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", "test": "run lint && run unit", "test:integration": "run lint && run -T jest --config=jest-integration.config.js", "watch": "run -T jest --watch", - "copytypes": "copyfiles -u 1 src/types/* dist", + "build:prepare": "copyfiles -u 1 src/types/* dist", "cov": "run -T jest --coverage; open-cli coverage/lcov-report/index.html", "cov-open": "open-cli coverage/lcov-report/index.html", "validate:dependencies": "yarn npm audit --environment production && run license-validate", diff --git a/packages/server-core-integration/tsconfig.build.json b/packages/server-core-integration/tsconfig.build.json index 8d7a61c0f5..fe4684c51f 100755 --- a/packages/server-core-integration/tsconfig.build.json +++ b/packages/server-core-integration/tsconfig.build.json @@ -1,9 +1,10 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], @@ -12,6 +13,12 @@ "resolveJsonModule": true, "types": ["node"], "skipLibCheck": true, - "esModuleInterop": true - } + "esModuleInterop": true, + "composite": true + }, + "references": [ + { + "path": "../shared-lib" + } + ] } diff --git a/packages/server-core-integration/tsconfig.json b/packages/server-core-integration/tsconfig.json index 899b2ddb7a..e2561ed6e1 100755 --- a/packages/server-core-integration/tsconfig.json +++ b/packages/server-core-integration/tsconfig.json @@ -3,6 +3,7 @@ "exclude": 
["node_modules/**"], "include": ["src/**/*.ts", "examples/*.ts"], "compilerOptions": { + "rootDir": "./", "types": ["jest", "node"] } } diff --git a/packages/shared-lib/package.json b/packages/shared-lib/package.json index fdf84370aa..e51e4c8032 100644 --- a/packages/shared-lib/package.json +++ b/packages/shared-lib/package.json @@ -15,8 +15,6 @@ }, "homepage": "https://github.com/nrkno/sofie-core/blob/master/packages/shared-lib#readme", "scripts": { - "build": "run -T rimraf dist && run build:main", - "build:main": "run -T tsc -p tsconfig.build.json", "lint:raw": "run -T eslint", "lint": "run lint:raw .", "unit": "run -T jest", diff --git a/packages/shared-lib/src/core/model/ShowStyle.ts b/packages/shared-lib/src/core/model/ShowStyle.ts index ee415a5be4..8c0d3d48be 100644 --- a/packages/shared-lib/src/core/model/ShowStyle.ts +++ b/packages/shared-lib/src/core/model/ShowStyle.ts @@ -107,6 +107,7 @@ export enum ClientActions { 'rewindSegments' = 'rewindSegments', 'showEntireCurrentSegment' = 'showEntireCurrentSegment', 'miniShelfQueueAdLib' = 'miniShelfQueueAdLib', + 'editMode' = 'editMode', } export enum DeviceActions { diff --git a/packages/shared-lib/src/package-manager/package.ts b/packages/shared-lib/src/package-manager/package.ts index 052c149208..0b70f6460b 100644 --- a/packages/shared-lib/src/package-manager/package.ts +++ b/packages/shared-lib/src/package-manager/package.ts @@ -40,7 +40,7 @@ export namespace ExpectedPackage { /** Generic (used in extends) */ export interface Base { - /** Unique id of the expectedPackage */ + /** Blueprint defined unique id of the package within the rundown/bucket */ _id: ExpectedPackageId /** Reference to which timeline-layer(s) the Package is going to be used in. * (Used to route the package to the right playout-device (targets)) diff --git a/packages/shared-lib/src/package-manager/publications.ts b/packages/shared-lib/src/package-manager/publications.ts index bd4f0c1d1c..99a097af6b 100644 --- a/packages/shared-lib/src/package-manager/publications.ts +++ b/packages/shared-lib/src/package-manager/publications.ts @@ -1,6 +1,7 @@ import { ExpectedPackage, PackageContainer, PackageContainerOnPackage } from './package.js' -import { PeripheralDeviceId, PieceInstanceId, RundownId, RundownPlaylistId } from '../core/model/Ids.js' +import { ExpectedPackageId, PeripheralDeviceId, RundownId, RundownPlaylistId } from '../core/model/Ids.js' import { ProtectedString } from '../lib/protectedString.js' +import { ReadonlyDeep } from 'type-fest' export interface PackageManagerPlayoutContext { _id: PeripheralDeviceId @@ -27,18 +28,18 @@ export interface PackageManagerPackageContainers { export type PackageManagerExpectedPackageId = ProtectedString<'PackageManagerExpectedPackage'> -export type PackageManagerExpectedPackageBase = ExpectedPackage.Base & { rundownId?: RundownId } +export type PackageManagerExpectedPackageBase = ReadonlyDeep> & { + _id: ExpectedPackageId +} export interface PackageManagerExpectedPackage { /** Unique id of the expectedPackage */ _id: PackageManagerExpectedPackageId expectedPackage: PackageManagerExpectedPackageBase - /** Lower should be done first */ - priority: number + /** Lower should be done first. 
If not set, the package is treated as low priority */ + priority: number | null sources: PackageContainerOnPackage[] targets: PackageContainerOnPackage[] playoutDeviceId: PeripheralDeviceId - - pieceInstanceId: PieceInstanceId | null } diff --git a/packages/shared-lib/tsconfig.build.json b/packages/shared-lib/tsconfig.build.json index 7acc7845d5..83c8cb4ebc 100644 --- a/packages/shared-lib/tsconfig.build.json +++ b/packages/shared-lib/tsconfig.build.json @@ -1,16 +1,18 @@ { "extends": "@sofie-automation/code-standard-preset/ts/tsconfig.lib", "include": ["src/**/*.ts"], - "exclude": ["node_modules/**", "src/**/*spec.ts", "src/**/__tests__/*", "src/**/__mocks__/*"], + "exclude": ["node_modules/**", "**/*spec.ts", "**/__tests__/*", "**/__mocks__/*"], "compilerOptions": { "target": "es2019", "outDir": "./dist", + "rootDir": "./src", "baseUrl": "./", "paths": { "*": ["./node_modules/*"], "@sofie-automation/shared-lib": ["./src/index.ts"] }, "resolveJsonModule": true, - "types": ["node"] + "types": ["node"], + "composite": true } } diff --git a/packages/tsconfig.build.json b/packages/tsconfig.build.json new file mode 100644 index 0000000000..ffb9abd079 --- /dev/null +++ b/packages/tsconfig.build.json @@ -0,0 +1,17 @@ +{ + "files": [], + "references": [ + { "path": "./blueprints-integration/tsconfig.build.json" }, + { "path": "./server-core-integration/tsconfig.build.json" }, + { "path": "./mos-gateway/tsconfig.build.json" }, + { "path": "./playout-gateway/tsconfig.build.json" }, + { "path": "./job-worker/tsconfig.build.json" }, + { "path": "./corelib/tsconfig.build.json" }, + { "path": "./shared-lib/tsconfig.build.json" }, + // { "path": "./openapi/tsconfig.build.json" }, + { "path": "./live-status-gateway/tsconfig.build.json" }, + { "path": "./meteor-lib/tsconfig.build.json" }, + { "path": "./webui/tsconfig.app.json" }, + { "path": "../meteor/tsconfig.json" } + ] +} diff --git a/packages/tsconfig.json b/packages/tsconfig.json index 71c49cb4de..e3041c036f 100644 --- a/packages/tsconfig.json +++ b/packages/tsconfig.json @@ -1,16 +1,17 @@ { "files": [], "references": [ - { "path": "blueprints-integration" }, - { "path": "server-core-integration" }, - { "path": "mos-gateway" }, - { "path": "playout-gateway" }, - { "path": "job-worker" }, - { "path": "corelib" }, - { "path": "shared-lib" }, - { "path": "openapi" }, - { "path": "live-status-gateway" }, - { "path": "meteor-lib" }, - { "path": "webui" } + { "path": "./blueprints-integration/tsconfig.json" }, + { "path": "./server-core-integration/tsconfig.json" }, + { "path": "./mos-gateway/tsconfig.json" }, + { "path": "./playout-gateway/tsconfig.json" }, + { "path": "./job-worker/tsconfig.json" }, + { "path": "./corelib/tsconfig.json" }, + { "path": "./shared-lib/tsconfig.json" }, + { "path": "./openapi/tsconfig.json" }, + { "path": "./live-status-gateway/tsconfig.json" }, + { "path": "./meteor-lib/tsconfig.json" }, + { "path": "./webui/tsconfig.json" }, + { "path": "../meteor/tsconfig.json" } ] } diff --git a/packages/webui/package.json b/packages/webui/package.json index c95d661eaf..b736f7e979 100644 --- a/packages/webui/package.json +++ b/packages/webui/package.json @@ -16,9 +16,7 @@ "scripts": { "dev": "vite --port=3005 --force", "build": "tsc -b && vite build", - "build:main": "tsc -p tsconfig.app.json --noEmit", "check-types": "tsc -p tsconfig.app.json --noEmit", - "watch-types": "run check-types --watch", "preview": "vite preview", "lint:raw": "run -T eslint", "lint": "run lint:raw .", diff --git
a/packages/webui/src/client/lib/partInstanceUtil.ts b/packages/webui/src/client/lib/partInstanceUtil.ts new file mode 100644 index 0000000000..8535cf597d --- /dev/null +++ b/packages/webui/src/client/lib/partInstanceUtil.ts @@ -0,0 +1,54 @@ +import { DBPart, PartInvalidReason } from '@sofie-automation/corelib/dist/dataModel/Part' + +/** + * Minimal interface for a PartInstance containing the properties needed for invalidReason checks. + */ +export interface PartInstanceLike { + part: Pick + invalidReason?: PartInvalidReason +} + +/** + * Get the effective invalidReason for a PartInstance. + * + * If the Part has a planned invalidReason (from ingest), it takes precedence. + * Otherwise, returns the runtime invalidReason from the PartInstance (from playout). + * + * This distinction matters because: + * - Part.invalidReason is planned/static (set during ingest, shouldn't create real PartInstance) + * - PartInstance.invalidReason is runtime/dynamic (set during playout, can be fixed) + * + * @param partInstance The PartInstance object + * @returns The effective invalidReason to display, or undefined if none + */ +export function getEffectiveInvalidReason(partInstance: PartInstanceLike): PartInvalidReason | undefined { + // Planned invalidReason (from Part/ingest) takes precedence + // It shouldn't be possible to create a real PartInstance of an invalid Part + if (partInstance.part.invalidReason) { + return partInstance.part.invalidReason + } + + // Runtime invalidReason (from PartInstance/playout) + if (partInstance.invalidReason) { + return partInstance.invalidReason + } + + return undefined +} + +/** + * Check if the effective state is "invalid" for a PartInstance. + * + * A PartInstance is considered invalid if either: + * - The Part has `invalid: true` (planned invalid, may not have an invalidReason) + * - The PartInstance has a runtime invalidReason (runtime invalid) + * + * Note: This is separate from getEffectiveInvalidReason because part.invalid can be true + * without an invalidReason being set (legacy behavior). + * + * @param partInstance The PartInstance object + * @returns true if the part should be shown as invalid + */ +export function isPartInstanceInvalid(partInstance: PartInstanceLike): boolean { + return !!partInstance.part.invalid || !!partInstance.invalidReason +} diff --git a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts index 2b1149dbd0..0b5e350290 100644 --- a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts +++ b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts @@ -19,7 +19,8 @@ export function pieceUiClassNames( uiState?: { leftAnchoredWidth: number rightAnchoredWidth: number - } + }, + draggable?: boolean ): string { const typeClass = layerType ? 
RundownUtils.getSourceLayerClassName(layerType) : '' @@ -59,5 +60,7 @@ export function pieceUiClassNames( 'invert-flash': highlight, 'element-selected': selected, + + 'draggable-element': draggable, }) } diff --git a/packages/webui/src/client/styles/elementSelected.scss b/packages/webui/src/client/styles/elementSelected.scss index 2dcd17b97b..2cb9b2f73b 100644 --- a/packages/webui/src/client/styles/elementSelected.scss +++ b/packages/webui/src/client/styles/elementSelected.scss @@ -18,3 +18,7 @@ $glow-color: rgba(255, 255, 255, 0.58); } } } + +.draggable-element { + border: dotted white 1px; +} diff --git a/packages/webui/src/client/ui/Prompter/PrompterView.tsx b/packages/webui/src/client/ui/Prompter/PrompterView.tsx index ba1cab594d..de70edff3b 100644 --- a/packages/webui/src/client/ui/Prompter/PrompterView.tsx +++ b/packages/webui/src/client/ui/Prompter/PrompterView.tsx @@ -62,6 +62,9 @@ interface PrompterConfig { pedal_rangeNeutralMax?: number pedal_rangeFwdMax?: number shuttle_speedMap?: number[] + xbox_speedMap?: number[] + xbox_reverseSpeedMap?: number[] + xbox_triggerDeadZone?: number marker?: 'center' | 'top' | 'bottom' | 'hide' showMarker: boolean showScroll: boolean @@ -77,6 +80,7 @@ export enum PrompterConfigMode { JOYCON = 'joycon', PEDAL = 'pedal', SHUTTLEWEBHID = 'shuttlewebhid', + XBOX = 'xbox', } export interface IPrompterControllerState { @@ -175,6 +179,18 @@ export class PrompterViewContent extends React.Component Number.parseInt(value, 10)), + xbox_reverseSpeedMap: + queryParams['xbox_reverseSpeedMap'] === undefined + ? undefined + : asArray(queryParams['xbox_reverseSpeedMap']).map((value) => Number.parseInt(value, 10)), + xbox_triggerDeadZone: (() => { + const val = Number.parseFloat(firstIfArray(queryParams['xbox_triggerDeadZone']) as string) + return Number.isNaN(val) ? undefined : val + })(), marker: (firstIfArray(queryParams['marker']) as any) || undefined, showMarker: queryParams['showmarker'] === undefined ? true : queryParams['showmarker'] === '1', showScroll: queryParams['showscroll'] === undefined ? 
true : queryParams['showscroll'] === '1', diff --git a/packages/webui/src/client/ui/Prompter/controller/joycon-device.ts b/packages/webui/src/client/ui/Prompter/controller/joycon-device.ts index f0c63a34e3..1f4d6de3ba 100644 --- a/packages/webui/src/client/ui/Prompter/controller/joycon-device.ts +++ b/packages/webui/src/client/ui/Prompter/controller/joycon-device.ts @@ -10,20 +10,20 @@ type JoyconMode = 'L' | 'R' | 'LR' | null * This class handles control of the prompter using */ export class JoyConController extends ControllerAbstract { - private prompterView: PrompterViewContent + private readonly prompterView: PrompterViewContent - private invertJoystick = false // change scrolling direction for joystick - private rangeRevMin = -1 // pedal "all back" position, the max-reverse-position - private rangeNeutralMin = -0.25 // pedal "back" position where reverse-range transitions to the neutral zone - private rangeNeutralMax = 0.25 // pedal "front" position where scrolling starts, the 0 speed origin - private rangeFwdMax = 1 // pedal "all front" position where scrolling is maxed out - private rightHandOffset = 1.4 // factor increased by 1.4 to account for the R joystick being less sensitive than L - private speedMap = [1, 2, 3, 4, 5, 8, 12, 30] - private reverseSpeedMap = [1, 2, 3, 4, 5, 8, 12, 30] - private deadBand = 0.25 + private readonly invertJoystick: boolean // change scrolling direction for joystick + private readonly rangeRevMin: number // pedal "all back" position, the max-reverse-position + private readonly rangeNeutralMin: number // pedal "back" position where reverse-range transitions to the neutral zone + private readonly rangeNeutralMax: number // pedal "front" position where scrolling starts, the 0 speed origin + private readonly rangeFwdMax: number // pedal "all front" position where scrolling is maxed out + private readonly rightHandOffset: number // factor increased by 1.4 to account for the R joystick being less sensitive than L + private readonly speedMap: number[] + private readonly reverseSpeedMap: number[] + private readonly deadBand: number - private speedSpline: Spline | undefined - private reverseSpeedSpline: Spline | undefined + private readonly speedSpline: Spline | undefined + private readonly reverseSpeedSpline: Spline | undefined private updateSpeedHandle: number | null = null private timestampOfLastUsedJoyconInput = 0 @@ -31,19 +31,22 @@ export class JoyConController extends ControllerAbstract { private lastInputValue = '' private lastButtonInputs: { [index: number]: { mode: JoyconMode; buttons: number[] } } = {} + // Bound event handler for cleanup + private readonly updateScrollPositionBound: (() => void) | undefined + constructor(view: PrompterViewContent) { super() this.prompterView = view // assigns params from URL or falls back to the default - this.invertJoystick = view.configOptions.joycon_invertJoystick || this.invertJoystick - this.rangeRevMin = view.configOptions.joycon_rangeRevMin || this.rangeRevMin - this.rangeNeutralMin = view.configOptions.joycon_rangeNeutralMin || this.rangeNeutralMin - this.rangeNeutralMax = view.configOptions.joycon_rangeNeutralMax || this.rangeNeutralMax - this.rangeFwdMax = view.configOptions.joycon_rangeFwdMax || this.rangeFwdMax - this.rightHandOffset = view.configOptions.joycon_rightHandOffset || this.rightHandOffset - this.speedMap = view.configOptions.joycon_speedMap || this.speedMap - this.reverseSpeedMap = view.configOptions.joycon_reverseSpeedMap || this.reverseSpeedMap + this.invertJoystick =
view.configOptions.joycon_invertJoystick || false + this.rangeRevMin = view.configOptions.joycon_rangeRevMin || -1 + this.rangeNeutralMin = view.configOptions.joycon_rangeNeutralMin || -0.25 + this.rangeNeutralMax = view.configOptions.joycon_rangeNeutralMax || 0.25 + this.rangeFwdMax = view.configOptions.joycon_rangeFwdMax || 1 + this.rightHandOffset = view.configOptions.joycon_rightHandOffset || 1.4 + this.speedMap = view.configOptions.joycon_speedMap || [1, 2, 3, 4, 5, 8, 12, 30] + this.reverseSpeedMap = view.configOptions.joycon_reverseSpeedMap || [1, 2, 3, 4, 5, 8, 12, 30] this.deadBand = Math.min(Math.abs(this.rangeNeutralMin), Math.abs(this.rangeNeutralMax)) // validate range settings, they need to be in sequence, or the logic will break @@ -84,12 +87,22 @@ export class JoyConController extends ControllerAbstract { this.reverseSpeedMap ) - window.addEventListener('gamepadconnected', this.updateScrollPosition.bind(this)) - window.addEventListener('gamepaddisconnected', this.updateScrollPosition.bind(this)) + this.updateScrollPositionBound = this.updateScrollPosition.bind(this) + + window.addEventListener('gamepadconnected', this.updateScrollPositionBound) + window.addEventListener('gamepaddisconnected', this.updateScrollPositionBound) } public destroy(): void { - // Nothing + if (this.updateScrollPositionBound) { + window.removeEventListener('gamepadconnected', this.updateScrollPositionBound) + window.removeEventListener('gamepaddisconnected', this.updateScrollPositionBound) + } + + if (this.updateSpeedHandle !== null) { + window.cancelAnimationFrame(this.updateSpeedHandle) + this.updateSpeedHandle = null + } } public onKeyDown(_e: KeyboardEvent): void { // Nothing diff --git a/packages/webui/src/client/ui/Prompter/controller/manager.ts b/packages/webui/src/client/ui/Prompter/controller/manager.ts index ea0479c96a..e374625ef2 100644 --- a/packages/webui/src/client/ui/Prompter/controller/manager.ts +++ b/packages/webui/src/client/ui/Prompter/controller/manager.ts @@ -6,6 +6,7 @@ import { JoyConController } from './joycon-device.js' import { KeyboardController } from './keyboard-device.js' import { ShuttleKeyboardController } from './shuttle-keyboard-device.js' import { ShuttleWebHidController } from './shuttle-webhid-device.js' +import { XboxController } from './xbox-controller-device.js' export class PrompterControlManager { private _view: PrompterViewContent @@ -21,24 +22,27 @@ export class PrompterControlManager { window.addEventListener('mouseup', this._onMouseKeyUp) if (Array.isArray(this._view.configOptions.mode)) { - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.MOUSE) > -1) { + if (this._view.configOptions.mode.includes(PrompterConfigMode.MOUSE)) { this._controllers.push(new MouseIshController(this._view)) } - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.KEYBOARD) > -1) { + if (this._view.configOptions.mode.includes(PrompterConfigMode.KEYBOARD)) { this._controllers.push(new KeyboardController(this._view)) } - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.SHUTTLEKEYBOARD) > -1) { + if (this._view.configOptions.mode.includes(PrompterConfigMode.SHUTTLEKEYBOARD)) { this._controllers.push(new ShuttleKeyboardController(this._view)) } - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.PEDAL) > -1) { + if (this._view.configOptions.mode.includes(PrompterConfigMode.PEDAL)) { this._controllers.push(new MidiPedalController(this._view)) } - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.JOYCON) > -1) { + if 
(this._view.configOptions.mode.includes(PrompterConfigMode.JOYCON)) { this._controllers.push(new JoyConController(this._view)) } - if (this._view.configOptions.mode.indexOf(PrompterConfigMode.SHUTTLEWEBHID) > -1) { + if (this._view.configOptions.mode.includes(PrompterConfigMode.SHUTTLEWEBHID)) { this._controllers.push(new ShuttleWebHidController(this._view)) } + if (this._view.configOptions.mode.includes(PrompterConfigMode.XBOX)) { + this._controllers.push(new XboxController(this._view)) + } } if (this._controllers.length === 0) { diff --git a/packages/webui/src/client/ui/Prompter/controller/xbox-controller-device.ts b/packages/webui/src/client/ui/Prompter/controller/xbox-controller-device.ts new file mode 100644 index 0000000000..5010b07189 --- /dev/null +++ b/packages/webui/src/client/ui/Prompter/controller/xbox-controller-device.ts @@ -0,0 +1,306 @@ +import { ControllerAbstract } from './lib.js' +import { PrompterConfigMode, PrompterViewContent } from '../PrompterView.js' +import Spline from 'cubic-spline' +import { logger } from '../../../lib/logging.js' + +/** + * This class handles control of the prompter using an Xbox controller + * via the HTML5 Gamepad API. + * + * Controls: + * - Right Trigger (RT): Scroll forward (speed proportional to trigger pressure) + * - Left Trigger (LT): Scroll backward (speed proportional to trigger pressure) + * - A Button: Take (go to next part) + * - B Button: Go to Live/On-Air + * - X Button: Go to previous segment + * - Y Button: Go to following segment + * - Left Bumper (LB): Go to top + * - Right Bumper (RB): Go to Next + * - D-Pad Up/Down: Fine scroll control + */ +export class XboxController extends ControllerAbstract { + private readonly prompterView: PrompterViewContent + + // Speed maps for trigger input (0-1 range) + private readonly speedMap: number[] + private readonly reverseSpeedMap: number[] + + // Trigger dead zones + private readonly triggerDeadZone: number + + private readonly speedSpline: Spline | undefined + private readonly reverseSpeedSpline: Spline | undefined + + private updateSpeedHandle: number | null = null + private currentPosition = 0 + private lastInputValue = '' + private lastButtonStates: { [index: number]: boolean[] } = {} + + // Track if take was recently pressed to prevent rapid-fire + private readonly takeDebounceTime = 500 // ms + private lastTakeTime = 0 + + // Bound event handlers for cleanup + private readonly onGamepadConnectedBound: (e: GamepadEvent) => void + private readonly onGamepadDisconnectedBound: (e: GamepadEvent) => void + + constructor(view: PrompterViewContent) { + super() + this.prompterView = view + + // Assign params from URL or fall back to defaults + this.speedMap = view.configOptions.xbox_speedMap || [2, 3, 5, 6, 8, 12, 18, 45] + this.reverseSpeedMap = view.configOptions.xbox_reverseSpeedMap || [2, 3, 5, 6, 8, 12, 18, 45] + this.triggerDeadZone = view.configOptions.xbox_triggerDeadZone ?? 
+/**
+ * This class handles control of the prompter using an Xbox controller
+ * via the HTML5 Gamepad API.
+ *
+ * Controls:
+ * - Right Trigger (RT): Scroll forward (speed proportional to trigger pressure)
+ * - Left Trigger (LT): Scroll backward (speed proportional to trigger pressure)
+ * - A Button: Take (go to next part)
+ * - B Button: Go to Live/On-Air
+ * - X Button: Go to previous segment
+ * - Y Button: Go to following segment
+ * - Left Bumper (LB): Go to top
+ * - Right Bumper (RB): Go to Next
+ * - D-Pad Up/Down: Fine scroll control
+ */
+export class XboxController extends ControllerAbstract {
+	private readonly prompterView: PrompterViewContent
+
+	// Speed maps for trigger input (0-1 range)
+	private readonly speedMap: number[]
+	private readonly reverseSpeedMap: number[]
+
+	// Trigger dead zones
+	private readonly triggerDeadZone: number
+
+	private readonly speedSpline: Spline | undefined
+	private readonly reverseSpeedSpline: Spline | undefined
+
+	private updateSpeedHandle: number | null = null
+	private currentPosition = 0
+	private lastInputValue = ''
+	private lastButtonStates: { [index: number]: boolean[] } = {}
+
+	// Track if take was recently pressed to prevent rapid-fire
+	private readonly takeDebounceTime = 500 // ms
+	private lastTakeTime = 0
+
+	// Bound event handlers for cleanup
+	private readonly onGamepadConnectedBound: (e: GamepadEvent) => void
+	private readonly onGamepadDisconnectedBound: (e: GamepadEvent) => void
+
+	constructor(view: PrompterViewContent) {
+		super()
+		this.prompterView = view
+
+		// Assign params from URL or fall back to defaults
+		this.speedMap = view.configOptions.xbox_speedMap || [2, 3, 5, 6, 8, 12, 18, 45]
+		this.reverseSpeedMap = view.configOptions.xbox_reverseSpeedMap || [2, 3, 5, 6, 8, 12, 18, 45]
+		this.triggerDeadZone = view.configOptions.xbox_triggerDeadZone ?? 0.1
+
+		// Create splines for smooth speed interpolation
+		// Forward speed spline (for right trigger, 0-1 range)
+		this.speedSpline = new Spline(
+			this.speedMap.map((_y, index, array) => (1 / (array.length - 1)) * index),
+			this.speedMap
+		)
+
+		// Reverse speed spline (for left trigger, 0-1 range)
+		this.reverseSpeedSpline = new Spline(
+			this.reverseSpeedMap.map((_y, index, array) => (1 / (array.length - 1)) * index),
+			this.reverseSpeedMap
+		)
+
+		this.onGamepadConnectedBound = this.onGamepadConnected.bind(this)
+		this.onGamepadDisconnectedBound = this.onGamepadDisconnected.bind(this)
+
+		window.addEventListener('gamepadconnected', this.onGamepadConnectedBound)
+		window.addEventListener('gamepaddisconnected', this.onGamepadDisconnectedBound)
+
+		// Start polling if a controller is already connected
+		this.startPolling()
+	}
+
+	public destroy(): void {
+		window.removeEventListener('gamepadconnected', this.onGamepadConnectedBound)
+		window.removeEventListener('gamepaddisconnected', this.onGamepadDisconnectedBound)
+
+		if (this.updateSpeedHandle !== null) {
+			window.cancelAnimationFrame(this.updateSpeedHandle)
+			this.updateSpeedHandle = null
+		}
+	}
+
+	public onKeyDown(_e: KeyboardEvent): void {
+		// Nothing - Xbox controller uses gamepad API
+	}
+
+	public onKeyUp(_e: KeyboardEvent): void {
+		// Nothing
+	}
+
+	public onMouseKeyDown(_e: MouseEvent): void {
+		// Nothing
+	}
+
+	public onMouseKeyUp(_e: MouseEvent): void {
+		// Nothing
+	}
+
+	public onWheel(_e: WheelEvent): void {
+		// Nothing
+	}
+
+	private onGamepadConnected(e: GamepadEvent): void {
+		if (this.isXboxController(e.gamepad)) {
+			logger.info(`Xbox controller connected: ${e.gamepad.id}`)
+			this.startPolling()
+		}
+	}
+
+	private onGamepadDisconnected(e: GamepadEvent): void {
+		if (this.isXboxController(e.gamepad)) {
+			logger.info(`Xbox controller disconnected: ${e.gamepad.id}`)
+		}
+	}
+
+	private isXboxController(gamepad: Gamepad): boolean {
+		// Xbox controllers typically have these vendor/product patterns
+		// Also match generic "xinput" controllers and "Xbox" in the name
+		const id = gamepad.id.toLowerCase()
+		return (
+			id.includes('xbox') ||
+			id.includes('xinput') ||
+			id.includes('045e') || // Microsoft vendor ID
+			id.includes('microsoft') ||
+			// Standard gamepad mapping with 4 axes and 17 buttons is typical for Xbox
+			(gamepad.mapping === 'standard' && gamepad.axes.length >= 4 && gamepad.buttons.length >= 17)
+		)
+	}
+
+	private getXboxControllers(): Gamepad[] {
+		const controllers: Gamepad[] = []
+		if (navigator.getGamepads) {
+			const gamepads = navigator.getGamepads()
+			if (gamepads) {
+				for (const gamepad of gamepads) {
+					if (gamepad && gamepad.connected && this.isXboxController(gamepad)) {
+						controllers.push(gamepad)
+					}
+				}
+			}
+		}
+		return controllers
+	}
+
+	private startPolling(): void {
+		if (this.updateSpeedHandle === null) {
+			this.updateScrollPosition()
+		}
+	}
+
+	/**
+	 * Xbox Controller Standard Mapping:
+	 * Buttons:
+	 *   0: A
+	 *   1: B
+	 *   2: X
+	 *   3: Y
+	 *   4: LB (Left Bumper)
+	 *   5: RB (Right Bumper)
+	 *   6: LT (Left Trigger) - also available as axis
+	 *   7: RT (Right Trigger) - also available as axis
+	 *   8: Back/View
+	 *   9: Start/Menu
+	 *   10: Left Stick Press
+	 *   11: Right Stick Press
+	 *   12: D-Pad Up
+	 *   13: D-Pad Down
+	 *   14: D-Pad Left
+	 *   15: D-Pad Right
+	 *   16: Xbox/Guide button
+	 *
+	 * Axes:
+	 *   0: Left Stick X (-1 left, 1 right)
+	 *   1: Left Stick Y (-1 up, 1 down)
+	 *   2: Right Stick X (-1 left, 1 right)
+	 *   3: Right Stick Y (-1 up, 1 down)
+	 */
+
+	private handleButtons(gamepad: Gamepad): void {
+		const buttonHistory = this.lastButtonStates[gamepad.index]
+		const currentButtons = gamepad.buttons.map((b) => b.pressed)
+
+		// First time seeing this controller
+		if (!buttonHistory) {
+			this.lastButtonStates[gamepad.index] = currentButtons
+			return
+		}
+
+		// Check for button state changes
+		currentButtons.forEach((pressed, index) => {
+			const wasPressed = buttonHistory[index]
+			if (pressed && !wasPressed) {
+				this.onButtonPressed(index, gamepad)
+			}
+		})
+
+		this.lastButtonStates[gamepad.index] = currentButtons
+	}
+
+	private onButtonPressed(buttonIndex: number, _gamepad: Gamepad): void {
+		const now = Date.now()
+
+		switch (buttonIndex) {
+			case 0: // A Button - Take
+				if (now - this.lastTakeTime > this.takeDebounceTime) {
+					this.lastTakeTime = now
+					this.prompterView.take('Xbox Controller A Button')
+				}
+				break
+			case 1: // B Button - Go to Live/On-Air
+				this.prompterView.scrollToLive()
+				break
+			case 2: // X Button - Go to previous segment
+				this.prompterView.scrollToPrevious()
+				break
+			case 3: // Y Button - Go to following segment
+				this.prompterView.scrollToFollowing()
+				break
+			case 4: // LB - Go to top
+				window.scrollTo({ top: 0, behavior: 'instant' })
+				break
+			case 5: // RB - Go to Next
+				this.prompterView.scrollToNext()
+				break
+			case 12: // D-Pad Up - scroll up a bit
+				window.scrollBy({ top: -100, behavior: 'smooth' })
+				break
+			case 13: // D-Pad Down - scroll down a bit
+				window.scrollBy({ top: 100, behavior: 'smooth' })
+				break
+		}
+	}
+
+	private calculateSpeed(controllers: Gamepad[]): number {
+		if (!this.speedSpline || !this.reverseSpeedSpline) return 0
+
+		let speed = 0
+
+		for (const controller of controllers) {
+			// Get trigger values
+			// In standard mapping, triggers are buttons 6 (LT) and 7 (RT) with value 0-1
+			const leftTrigger = controller.buttons[6]?.value ?? 0
+			const rightTrigger = controller.buttons[7]?.value ?? 0
+
+			// Apply dead zone
+			const effectiveLeft = leftTrigger > this.triggerDeadZone ? leftTrigger : 0
+			const effectiveRight = rightTrigger > this.triggerDeadZone ? rightTrigger : 0
+
+			// Store for debugging
+			this.lastInputValue = `LT: ${effectiveLeft.toFixed(2)}, RT: ${effectiveRight.toFixed(2)}`
+
+			// Calculate speed from triggers
+			// Right trigger = forward (positive speed)
+			// Left trigger = backward (negative speed)
+			if (effectiveRight > 0) {
+				speed += Math.round(this.speedSpline.at(effectiveRight))
+			}
+			if (effectiveLeft > 0) {
+				speed -= Math.round(this.reverseSpeedSpline.at(effectiveLeft))
+			}
+		}
+
+		return speed
+	}
+
+	private updateScrollPosition(): void {
+		const controllers = this.getXboxControllers()
+
+		if (controllers.length > 0) {
+			// Handle button presses
+			for (const controller of controllers) {
+				this.handleButtons(controller)
+			}
+
+			// Calculate and apply scroll speed
+			const speed = this.calculateSpeed(controllers)
+
+			if (speed !== 0) {
+				window.scrollBy({ top: speed, behavior: 'instant' })
+			}
+
+			const scrollPosition = window.scrollY
+			if (speed !== 0 && this.currentPosition === scrollPosition) {
+				// We tried to move but couldn't (reached end)
+				// Could add haptic feedback here if supported
+			}
+			this.currentPosition = scrollPosition
+
+			// Debug output
+			this.prompterView.DEBUG_controllerState({
+				source: PrompterConfigMode.XBOX,
+				lastSpeed: speed,
+				lastEvent: this.lastInputValue,
+			})
+		}
+
+		// Continue polling
+		this.updateSpeedHandle = window.requestAnimationFrame(this.updateScrollPosition.bind(this))
+	}
+}
diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx
index 9573aec79f..d795959a66 100644
--- a/packages/webui/src/client/ui/RundownView.tsx
+++ b/packages/webui/src/client/ui/RundownView.tsx
@@ -27,7 +27,7 @@ import {
 	maintainFocusOnPartInstance,
 	scrollToPartInstance,
 	getHeaderHeight,
-} from '../lib/viewPort'
+} from '../lib/viewPort.js'
 import { AfterBroadcastForm } from './AfterBroadcastForm.js'
 import { RundownRightHandControls } from './RundownView/RundownRightHandControls.js'
 import { PeripheralDevicesAPI } from '../lib/clientAPI.js'
@@ -38,7 +38,7 @@ import {
 } from './RundownView/RundownNotifier.js'
 import { NotificationCenterPanel } from '../lib/notifications/NotificationCenterPanel.js'
 import { NotificationCenter, NoticeLevel, Notification } from '../lib/notifications/notifications.js'
-import { SupportPopUp } from './SupportPopUp'
+import { SupportPopUp } from './SupportPopUp.js'
 import { KeyboardFocusIndicator } from '../lib/KeyboardFocusIndicator.js'
 import { PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice'
 import { doUserAction, UserAction } from '../lib/clientUserAction.js'
@@ -111,6 +111,7 @@ import { useMiniShelfAdlibsData } from './RundownView/useQueueMiniShelfAdlib.js'
 import { RundownViewContextProviders } from './RundownView/RundownViewContextProviders.js'
 import { AnimatePresence } from 'motion/react'
 import { UserError } from '@sofie-automation/corelib/dist/error'
+import { DragContextProvider } from './RundownView/DragContextProvider.js'

 const HIDE_NOTIFICATIONS_AFTER_MOUNT: number | undefined = 5000

@@ -1367,213 +1368,216 @@ const RundownViewContent = translateWithTracker
-
-			{(selectionContext) => {
-				return (
-
0, - })} - style={this.getStyle()} - onWheelCapture={this.onWheel} - onContextMenu={this.onContextMenuTop} - > - {this.renderSegmentsList()} - - {this.props.matchedSegments && - this.props.matchedSegments.length > 0 && - this.props.userPermissions.studio && - studio.settings.enableEvaluationForm && } - - {!this.props.hideRundownHeader && ( + + + {(selectionContext) => { + return ( +
0, + })} + style={this.getStyle()} + onWheelCapture={this.onWheel} + onContextMenu={this.onContextMenuTop} + > + {this.renderSegmentsList()} + + {this.props.matchedSegments && + this.props.matchedSegments.length > 0 && + this.props.userPermissions.studio && + studio.settings.enableEvaluationForm && } + + {!this.props.hideRundownHeader && ( + + r._id)} + firstRundown={this.props.rundowns[0]} + onActivate={this.onActivate} + inActiveRundownView={this.props.inActiveRundownView} + currentRundown={currentRundown} + layout={this.props.selectedHeaderLayout} + showStyleBase={showStyleBase} + showStyleVariant={showStyleVariant} + /> + + )} - r._id)} - firstRundown={this.props.rundowns[0]} - onActivate={this.onActivate} - inActiveRundownView={this.props.inActiveRundownView} - currentRundown={currentRundown} - layout={this.props.selectedHeaderLayout} showStyleBase={showStyleBase} showStyleVariant={showStyleVariant} + onChangeBottomMargin={this.onChangeBottomMargin} + rundownLayout={this.props.selectedShelfLayout} + studio={studio} /> - )} - - - - - {this.props.userPermissions.studio && !Settings.disableBlurBorder && ( - -
-
- )} -
- - - - - {this.props.userPermissions.studio && currentRundown && ( - + {this.props.userPermissions.studio && !Settings.disableBlurBorder && ( + +
+
+ )} +
+ + - )} - - - - {this.state.isNotificationsCenterOpen && ( - + + {this.props.userPermissions.studio && currentRundown && ( + )} - {!this.state.isNotificationsCenterOpen && selectionContext.listSelectedElements().length > 0 && ( -
- -
- )} - - {this.state.isSupportPanelOpen && ( - -
- -
- - {t('Take a Snapshot')} - -
- {this.props.userPermissions.studio && ( - <> - -
- +
+ + + {this.state.isNotificationsCenterOpen && ( + + )} + {!this.state.isNotificationsCenterOpen && + selectionContext.listSelectedElements().length > 0 && ( +
+ +
)} - {this.props.userPermissions.studio && } - + + {this.state.isSupportPanelOpen && ( + +
+ +
+ + {t('Take a Snapshot')} + +
+ {this.props.userPermissions.studio && ( + <> + +
+ + )} + {this.props.userPermissions.studio && } +
+ )} +
+
+ + {this.props.userPermissions.studio && ( + )} -
-
- - {this.props.userPermissions.studio && ( - + + selectionContext.clearAndSetSelection(selection)} + studioMode={this.props.userPermissions.studio} + enablePlayFromAnywhere={!!studio.settings.enablePlayFromAnywhere} + enableQuickLoop={!!studio.settings.enableQuickLoop} + enableUserEdits={!!studio.settings.enableUserEdits} /> - )} - - - selectionContext.clearAndSetSelection(selection)} - studioMode={this.props.userPermissions.studio} - enablePlayFromAnywhere={!!studio.settings.enablePlayFromAnywhere} - enableQuickLoop={!!studio.settings.enableQuickLoop} - enableUserEdits={!!studio.settings.enableUserEdits} - /> - - - {this.state.isClipTrimmerOpen && - this.state.selectedPiece && - RundownUtils.isPieceInstance(this.state.selectedPiece) && - (selectedPieceRundown === undefined ? ( - this.setState({ selectedPiece: undefined })} - title={t('Rundown not found')} - acceptText={t('Close')} - > - {t('Rundown for piece "{{pieceLabel}}" could not be found.', { - pieceLabel: this.state.selectedPiece.instance.piece.name, - })} - - ) : ( - this.setState({ isClipTrimmerOpen: false })} - /> - ))} - - - - - - {this.props.playlist && this.props.studio && this.props.showStyleBase && ( - - )} - -
- ) - }} - { - // USE IN CASE OF DEBUGGING EMERGENCY - /* getDeveloperMode() &&
-
*/ - } -
+ + + {this.state.isClipTrimmerOpen && + this.state.selectedPiece && + RundownUtils.isPieceInstance(this.state.selectedPiece) && + (selectedPieceRundown === undefined ? ( + this.setState({ selectedPiece: undefined })} + title={t('Rundown not found')} + acceptText={t('Close')} + > + {t('Rundown for piece "{{pieceLabel}}" could not be found.', { + pieceLabel: this.state.selectedPiece.instance.piece.name, + })} + + ) : ( + this.setState({ isClipTrimmerOpen: false })} + /> + ))} + + + + + + {this.props.playlist && this.props.studio && this.props.showStyleBase && ( + + )} + +
+ ) + }} + { + // USE IN CASE OF DEBUGGING EMERGENCY + /* getDeveloperMode() &&
+
*/ + } +
+			)
 }
diff --git a/packages/webui/src/client/ui/RundownView/DragContext.ts b/packages/webui/src/client/ui/RundownView/DragContext.ts
new file mode 100644
index 0000000000..4c237dfbda
--- /dev/null
+++ b/packages/webui/src/client/ui/RundownView/DragContext.ts
@@ -0,0 +1,46 @@
+import { PartInstanceId, PieceInstanceId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { createContext } from 'react'
+import { PieceUi } from '../SegmentContainer/withResolvedSegment.js'
+
+export interface IDragContext {
+	/**
+	 * Indicate a drag operation on a piece has started
+	 * @param piece The piece that is being dragged
+	 * @param timeScale The current TimeScale of the segment
+	 * @param position The position of the mouse
+	 * @param elementOffset The x-coordinate of the element relative to the mouse position
+	 * @param limitToPart If set, the piece may only be dragged within this part
+	 * @param limitToSegment If set, the piece may only be dragged within this segment (note: if another
+	 * segment lacks the matching source layer, the dragged piece will appear to vanish; handling that,
+	 * so this limit can be omitted, is a TODO)
+	 */
+	startDrag: (
+		piece: PieceUi,
+		timeScale: number,
+		position: { x: number; y: number },
+		elementOffset?: number,
+		limitToPart?: PartInstanceId,
+		limitToSegment?: SegmentId
+	) => void
+	/**
+	 * Indicate the part the mouse is on has changed
+	 * @param partId The part id that the mouse is currently hovering on
+	 * @param segmentId The segment the currently hovered part is in
+	 * @param position The position of the part in absolute coords to the screen
+	 */
+	setHoveredPart: (partId: PartInstanceId, segmentId: SegmentId, position: { x: number; y: number }) => void
+
+	/**
+	 * Whether dragging is allowed, i.e. edit mode is active
+	 */
+	enabled: boolean
+
+	/**
+	 * PieceId of the piece that is being dragged
+	 */
+	pieceId: undefined | PieceInstanceId
+
+	/**
+	 * The piece with any local overrides coming from dragging it around (i.e. changed renderedInPoint)
+	 */
+	piece: undefined | PieceUi
+}
+
+export const dragContext = createContext<IDragContext | undefined>(undefined)
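+
+// Illustrative usage sketch (not part of this patch's API surface): a consumer
+// is expected to read the context and start a drag from a mousedown handler,
+// roughly as SourceLayerItem.tsx does further down in this diff:
+//
+//   const dragCtx = useContext(dragContext)
+//   const onMouseDown = (e: React.MouseEvent) => {
+//     if (!dragCtx?.enabled) return
+//     dragCtx.startDrag(piece, timeScale, { x: e.clientX, y: e.clientY })
+//   }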
diff --git a/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx b/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx
new file mode 100644
index 0000000000..5020ac7ec3
--- /dev/null
+++ b/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx
@@ -0,0 +1,195 @@
+import { PartInstanceId, PieceInstanceId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { PropsWithChildren, useCallback, useEffect, useMemo, useRef, useState } from 'react'
+import { dragContext, IDragContext } from './DragContext.js'
+import { PieceUi } from '../SegmentContainer/withResolvedSegment.js'
+import { doUserAction, UserAction } from '../../lib/clientUserAction.js'
+import { MeteorCall } from '../../lib/meteorApi.js'
+import { TFunction } from 'i18next'
+import { UIParts } from '../Collections.js'
+import { Segments } from '../../collections/index.js'
+import { literal } from '@sofie-automation/shared-lib/dist/lib/lib'
+import { DefaultUserOperationRetimePiece, DefaultUserOperationsTypes } from '@sofie-automation/blueprints-integration'
+import RundownViewEventBus, {
+	RundownViewEvents,
+	EditModeEvent,
+} from '@sofie-automation/meteor-lib/dist/triggers/RundownViewEventBus'
+
+const DRAG_TIMEOUT = 10000
+
+interface Props {
+	t: TFunction
+}
+
+// Note: this does not limit dragging between rundowns right now; it is unclear whether the ingest stage can handle a cross-rundown move
+export function DragContextProvider({ t, children }: PropsWithChildren<Props>): JSX.Element {
+	const [pieceId, setPieceId] = useState<PieceInstanceId | undefined>(undefined)
+	const [piece, setPiece] = useState<PieceUi | undefined>(undefined)
+
+	const [enabled, setEnabled] = useState(false)
+
+	const partIdRef = useRef<PartInstanceId | undefined>(undefined)
+	const positionRef = useRef({ x: 0, y: 0 })
+	const segmentIdRef = useRef<SegmentId | undefined>(undefined)
+	const limitToPartRef = useRef<PartInstanceId | undefined>(undefined)
+	const dragTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
+
+	const startDrag = useCallback(
+		(
+			ogPiece: PieceUi,
+			timeScale: number,
+			pos: { x: number; y: number },
+			elementOffset?: number,
+			limitToPart?: PartInstanceId,
+			limitToSegment?: SegmentId
+		) => {
+			if (pieceId) return // a drag is already in progress
+
+			// Clear any existing drag timeout from a previous drag
+			if (dragTimeoutRef.current) {
+				clearTimeout(dragTimeoutRef.current)
+				dragTimeoutRef.current = null
+			}
+
+			const inPoint = ogPiece.renderedInPoint ?? 0
+			segmentIdRef.current = limitToSegment
+			limitToPartRef.current = limitToPart
+			positionRef.current = pos
+			setPieceId(ogPiece.instance._id)
+
+			let localPiece = ogPiece // keep a copy of the overridden piece, because React does not let us access the state of the context easily
+
+			const onMove = (e: MouseEvent) => {
+				const newInPoint =
+					(!partIdRef.current ? inPoint : (elementOffset ?? 0) / timeScale) +
+					(e.clientX - positionRef.current.x) / timeScale
+
+				localPiece = {
+					...ogPiece,
+					instance: { ...ogPiece.instance, partInstanceId: partIdRef.current ?? ogPiece.instance.partInstanceId },
+					renderedInPoint: newInPoint,
+				}
+				setPiece(localPiece)
+			}
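+
+			// On mouseup the drag is committed: the locally overridden renderedInPoint is sent
+			// to the backend as a RETIME_PIECE user operation, and the local override is dropped
+			// again once the server round-trip has finished (see cleanup below).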
+			const onMouseUp = (e: MouseEvent) => {
+				// process the drag
+				if (!localPiece || localPiece.renderedInPoint === ogPiece.renderedInPoint) return cleanup()
+
+				// find the parts so we can get their externalId
+				const startPartId = localPiece.instance.piece.startPartId // note: this may need special care for infinite pieces
+				const part = startPartId ? UIParts.findOne(startPartId) : undefined
+				const oldPart =
+					startPartId === ogPiece.instance.piece.startPartId
+						? part
+						: ogPiece.instance.piece.startPartId
+						? UIParts.findOne(ogPiece.instance.piece.startPartId)
+						: undefined
+				if (!part) return cleanup() // cannot continue without a parent part for the piece
+
+				// find the Segment's External ID
+				const segment = Segments.findOne(part?.segmentId)
+				const oldSegment = part?.segmentId === oldPart?.segmentId ? segment : Segments.findOne(oldPart?.segmentId)
+				if (!segment) return cleanup()
+
+				const operationTarget = {
+					segmentExternalId: oldSegment?.externalId,
+					partExternalId: oldPart?.externalId,
+					pieceExternalId: ogPiece.instance.piece.externalId,
+				}
+				doUserAction(
+					t,
+					e,
+					UserAction.EXECUTE_USER_OPERATION,
+					(e, ts) =>
+						MeteorCall.userAction.executeUserChangeOperation(
+							e,
+							ts,
+							part.rundownId,
+							operationTarget,
+							literal<DefaultUserOperationRetimePiece>({
+								id: DefaultUserOperationsTypes.RETIME_PIECE,
+								payload: {
+									segmentExternalId: segment.externalId,
+									partExternalId: part.externalId,
+
+									inPoint: localPiece.renderedInPoint ?? inPoint,
+								},
+							})
+						),
+					() => {
+						cleanup()
+					}
+				)
+			}
+
+			const cleanup = () => {
+				// Clear the drag timeout if it's still pending
+				if (dragTimeoutRef.current) {
+					clearTimeout(dragTimeoutRef.current)
+					dragTimeoutRef.current = null
+				}
+				// detach from the mouse
+				document.removeEventListener('mousemove', onMove)
+				document.removeEventListener('mouseup', onMouseUp)
+				// unset state - note: for UX reasons this runs after the backend operation has returned a result
+				setPieceId(undefined)
+				setPiece(undefined)
+				partIdRef.current = undefined
+				segmentIdRef.current = undefined
+			}
+
+			document.addEventListener('mousemove', onMove)
+			document.addEventListener('mouseup', onMouseUp)
+
+			dragTimeoutRef.current = setTimeout(() => {
+				// after the timeout we want to bail out in case something went wrong
+				cleanup()
+			}, DRAG_TIMEOUT)
+		},
+		[pieceId, t]
+	)
+
+	const setHoveredPart = useCallback(
+		(updatedPartId: PartInstanceId, segmentId: SegmentId, pos: { x: number; y: number }) => {
+			if (!pieceId) return
+			if (updatedPartId === piece?.instance.partInstanceId) return
+			if (segmentIdRef.current && segmentIdRef.current !== segmentId) return
+			if (limitToPartRef.current && limitToPartRef.current !== updatedPartId) return
+
+			partIdRef.current = updatedPartId
+			positionRef.current = pos
+		},
+		[pieceId, piece?.instance.partInstanceId]
+	)
+
+	const onSetEditMode = useCallback((e: EditModeEvent) => {
+		if (e.state === 'toggle') {
+			setEnabled((s) => !s)
+		} else {
+			setEnabled(e.state)
+		}
+	}, [])
+
+	useEffect(() => {
+		RundownViewEventBus.on(RundownViewEvents.EDIT_MODE, onSetEditMode)
+		return () => {
+			RundownViewEventBus.off(RundownViewEvents.EDIT_MODE, onSetEditMode)
+		}
+	}, [onSetEditMode])
+
+	const ctx = useMemo(
+		() =>
+			literal<IDragContext>({
+				pieceId,
+				piece,
+
+				enabled,
+
+				startDrag,
+				setHoveredPart,
+			}),
+		[pieceId, piece, enabled, startDrag, setHoveredPart]
+	)
+
+	return <dragContext.Provider value={ctx}>{children}</dragContext.Provider>
+}
diff --git a/packages/webui/src/client/ui/SegmentList/LinePart.tsx b/packages/webui/src/client/ui/SegmentList/LinePart.tsx
index 0c89801e0d..67c79f56f9 100644
--- a/packages/webui/src/client/ui/SegmentList/LinePart.tsx
+++ b/packages/webui/src/client/ui/SegmentList/LinePart.tsx
@@ -17,6 +17,7 @@ import { LinePartTitle } from './LinePartTitle.js'
 import { TimingDataResolution, TimingTickResolution, useTiming } from '../RundownView/RundownTiming/withTiming.js'
 import { RundownTimingContext, getPartInstanceTimingId } from '../../lib/rundownTiming.js'
 import { LoopingIcon } from '../../lib/ui/icons/looping.js'
+import { isPartInstanceInvalid } from '../../lib/partInstanceUtil.js'

 interface IProps {
segment: SegmentUi @@ -80,6 +81,9 @@ export function LinePart({ const isInsideQuickLoop = (timingDurations.partsInQuickLoop || {})[timingId] const isOutsideActiveQuickLoop = isPlaylistLooping && !isInsideQuickLoop && !isNextPart && !hasAlreadyPlayed + // Check for both planned and runtime invalidReason + const isInvalid = isPartInstanceInvalid(part.instance) + const getPartContext = useCallback(() => { const partElement = document.querySelector('#' + SegmentTimelinePartElementId + part.instance._id) const partDocumentOffset = getElementDocumentOffset(partElement) @@ -137,7 +141,7 @@ export function LinePart({ 'segment-opl__part--has-played': hasAlreadyPlayed && (!isPlaylistLooping || !isInsideQuickLoop), 'segment-opl__part--outside-quickloop': isOutsideActiveQuickLoop, 'segment-opl__part--quickloop-start': isQuickLoopStart, - 'segment-opl__part--invalid': part.instance.part.invalid, + 'segment-opl__part--invalid': isInvalid, 'segment-opl__part--timing-sibling': isPreceededByTimingGroupSibling, }), //@ts-expect-error A Data attribute is perfectly fine diff --git a/packages/webui/src/client/ui/SegmentList/LinePartTimeline.tsx b/packages/webui/src/client/ui/SegmentList/LinePartTimeline.tsx index 57d534d855..2b09a93ef3 100644 --- a/packages/webui/src/client/ui/SegmentList/LinePartTimeline.tsx +++ b/packages/webui/src/client/ui/SegmentList/LinePartTimeline.tsx @@ -16,6 +16,7 @@ import { InvalidPartCover } from '../SegmentTimeline/Parts/InvalidPartCover.js' import { getPartInstanceTimingId } from '../../lib/rundownTiming.js' import { QuickLoopEnd } from './QuickLoopEnd.js' import { getShowHiddenSourceLayers } from '../../lib/localStorage.js' +import { getEffectiveInvalidReason, isPartInstanceInvalid } from '../../lib/partInstanceUtil.js' const TIMELINE_DEFAULT_BASE = 30 * 1000 @@ -100,7 +101,9 @@ export const LinePartTimeline: React.FC = function LinePartTimeline({ const willAutoNextIntoThisPart = isNext ? 
currentPartWillAutonext : part.willProbablyAutoNext const willAutoNextOut = !!part.instance.part.autoNext - const isInvalid = !!part.instance.part.invalid + // Check for both planned and runtime invalidReason + const effectiveInvalidReason = getEffectiveInvalidReason(part.instance) + const isInvalid = isPartInstanceInvalid(part.instance) const loop = mainPiece?.instance.piece.content?.loop const endsInFreeze = !part.instance.part.autoNext && !loop && !!mainPiece?.instance.piece.content?.sourceDuration @@ -140,8 +143,8 @@ export const LinePartTimeline: React.FC = function LinePartTimeline({ )} )} - {part.instance.part.invalid && !part.instance.part.gap && ( - + {isInvalid && !part.instance.part.gap && ( + )} {!isLive && !isInvalid && ( diff --git a/packages/webui/src/client/ui/SegmentStoryboard/StoryboardPart.tsx b/packages/webui/src/client/ui/SegmentStoryboard/StoryboardPart.tsx index 3a3b4202a8..42ed6ff729 100644 --- a/packages/webui/src/client/ui/SegmentStoryboard/StoryboardPart.tsx +++ b/packages/webui/src/client/ui/SegmentStoryboard/StoryboardPart.tsx @@ -23,6 +23,7 @@ import { AutoNextStatus } from '../RundownView/RundownTiming/AutoNextStatus.js' import { RundownTimingContext, getPartInstanceTimingId } from '../../lib/rundownTiming.js' import { TimingDataResolution, TimingTickResolution, useTiming } from '../RundownView/RundownTiming/withTiming.js' import { LoopingIcon } from '../../lib/ui/icons/looping.js' +import { getEffectiveInvalidReason, isPartInstanceInvalid } from '../../lib/partInstanceUtil.js' interface IProps { className?: string @@ -124,7 +125,9 @@ export function StoryboardPart({ useRundownViewEventBusListener(RundownViewEvents.HIGHLIGHT, onHighlight) - const isInvalid = part.instance.part.invalid + // Get effective invalidReason: planned (Part) takes precedence over runtime (PartInstance) + const effectiveInvalidReason = getEffectiveInvalidReason(part.instance) + const isInvalid = isPartInstanceInvalid(part.instance) const isFloated = part.instance.part.floated const isInsideQuickLoop = timingDurations.partsInQuickLoop?.[getPartInstanceTimingId(part.instance)] ?? false const isOutsideActiveQuickLoop = !isInsideQuickLoop && isPlaylistLooping && !isNextPart @@ -140,7 +143,7 @@ export function StoryboardPart({ 'invert-flash': highlight, 'segment-storyboard__part--next': isNextPart, 'segment-storyboard__part--live': isLivePart, - 'segment-storyboard__part--invalid': part.instance.part.invalid, + 'segment-storyboard__part--invalid': isInvalid, 'segment-storyboard__part--outside-quickloop': isOutsideActiveQuickLoop, 'segment-storyboard__part--quickloop-start': isQuickLoopStart, 'segment-storyboard__part--quickloop-end': isQuickLoopEnd, @@ -177,7 +180,7 @@ export function StoryboardPart({ )} {isInvalid ? ( - + ) : null} {isFloated ?
: null}
{part.instance.part.title}
@@ -185,7 +188,7 @@ export function StoryboardPart({
): JSX.Element { +export function InvalidPartCover({ className, invalidReason }: Readonly): JSX.Element { const element = React.createRef() const previewContext = useContext(PreviewPopUpContext) @@ -19,11 +22,11 @@ export function InvalidPartCover({ className, part }: Readonly): JSX.Ele return } - if (part.invalidReason?.message && !previewSession.current) { + if (invalidReason?.message && !previewSession.current) { previewSession.current = previewContext.requestPreview(e.target as HTMLDivElement, [ { type: 'warning', - content: part.invalidReason?.message, + content: invalidReason.message, }, ]) } diff --git a/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx b/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx index a3e12c01d0..28d3fa8e9a 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/Parts/SegmentTimelinePart.tsx @@ -41,6 +41,7 @@ import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { LIVE_LINE_TIME_PADDING } from '../Constants.js' import * as RundownResolver from '../../../lib/RundownResolver.js' import { Events as MOSEvents } from '../../../lib/data/mos/plugin-support.js' +import { getEffectiveInvalidReason, isPartInstanceInvalid } from '../../../lib/partInstanceUtil.js' export const SegmentTimelineLineElementId = 'rundown__segment__line__' export const SegmentTimelinePartElementId = 'rundown__segment__part__' @@ -662,6 +663,7 @@ export class SegmentTimelinePartClass extends React.Component )} {this.renderTimelineOutputGroups(this.props.part)} - {innerPart.invalid ? ( - + {isInvalid ? ( + ) : null} {innerPart.floated ?
: null} {this.props.playlist.nextTimeOffset && @@ -746,7 +756,7 @@ export class SegmentTimelinePartClass extends React.Component): JSX.Element { const { getPartContext, onMouseDown } = useMouseContext(props) + const dragCtx = useContext(dragContext) + + const pieces = + dragCtx?.piece && dragCtx.piece.sourceLayer?._id === props.layer._id + ? (props.layer.pieces ?? []).filter((p) => p.instance._id !== dragCtx.piece?.instance._id).concat(dragCtx.piece) + : props.layer.pieces + + const onMouseEnter: MouseEventHandler = (e) => { + if (!dragCtx) return + + const pos = (e.currentTarget as HTMLDivElement).getBoundingClientRect() + dragCtx.setHoveredPart(props.part.instance._id, props.segment._id, { x: pos.x, y: pos.y }) + } return ( ): JSX.Element { //@ts-expect-error A Data attribute is perfectly fine 'data-layer-id': props.layer._id, onMouseDownCapture: (e) => onMouseDown(e), + onMouseEnter, role: 'log', 'aria-live': 'assertive', 'aria-label': props.layer.name, @@ -107,9 +122,9 @@ export function SourceLayer(props: Readonly): JSX.Element { holdToDisplay={contextMenuHoldToDisplayTime()} collect={getPartContext} > - {props.layer.pieces !== undefined + {pieces !== undefined ? _.chain( - props.layer.pieces.filter((piece) => { + pieces.filter((piece) => { // filter only pieces belonging to this part return piece.instance.partInstanceId === props.part.instance._id ? // filter only pieces, that have not been hidden from the UI diff --git a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx index 5c6f36f459..0de91e5571 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx @@ -1,6 +1,12 @@ import * as React from 'react' import { ISourceLayerUi, IOutputLayerUi, PartUi, PieceUi } from './SegmentTimelineContainer.js' -import { SourceLayerType, PieceLifespan, IBlueprintPieceType } from '@sofie-automation/blueprints-integration' +import { + SourceLayerType, + PieceLifespan, + IBlueprintPieceType, + UserEditingType, + DefaultUserOperationsTypes, +} from '@sofie-automation/blueprints-integration' import { RundownUtils } from '../../lib/rundown.js' import { DefaultLayerItemRenderer } from './Renderers/DefaultLayerItemRenderer.js' import { MicSourceRenderer } from './Renderers/MicSourceRenderer.js' @@ -20,6 +26,7 @@ import { ReadonlyDeep } from 'type-fest' import { useSelectedElementsContext } from '../RundownView/SelectedElementsContext.js' import { PieceContentStatusObj } from '@sofie-automation/corelib/dist/dataModel/PieceContentStatus' import { useCallback, useRef, useState, useEffect, useContext } from 'react' +import { dragContext } from '../RundownView/DragContext.js' import { convertSourceLayerItemToPreview, IPreviewPopUpSession, @@ -114,6 +121,11 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele const [leftAnchoredWidth, setLeftAnchoredWidth] = useState(0) const [rightAnchoredWidth, setRightAnchoredWidth] = useState(0) + const dragCtx = useContext(dragContext) + const hasDraggableElement = !!piece.instance.piece.userEditOperations?.find( + (op) => op.type === UserEditingType.SOFIE && op.id === DefaultUserOperationsTypes.RETIME_PIECE + ) + const state = { highlight, showPreviewPopUp, @@ -164,6 +176,9 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele ) const itemDblClick = useCallback( (e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + if (studio?.settings.enableUserEdits && 
!studio?.settings.allowPieceDirectPlay) { const pieceId = piece.instance.piece._id if (!selectElementContext.isSelected(pieceId)) { @@ -171,23 +186,42 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele } else { selectElementContext.clearSelections() } - // Until a proper data structure, the only reference is a part. - // const partId = this.props.part.instance.part._id - // if (!selectElementContext.isSelected(partId)) { - // selectElementContext.clearAndSetSelection({ type: 'part', elementId: partId }) - // } else { - // selectElementContext.clearSelections() - // } } else if (typeof onDoubleClick === 'function') { onDoubleClick(piece, e) } }, [piece] ) - const itemMouseDown = useCallback((e: React.MouseEvent) => { - e.preventDefault() - e.stopPropagation() - }, []) + const itemMouseDown = useCallback( + (e: React.MouseEvent) => { + if (!hasDraggableElement) return + + if (dragCtx && dragCtx.enabled) { + e.preventDefault() + e.stopPropagation() + + const targetPos = (e.target as HTMLDivElement).getBoundingClientRect() + const retimeOp = piece.instance.piece.userEditOperations?.find( + (op) => op.type === UserEditingType.SOFIE && op.id === DefaultUserOperationsTypes.RETIME_PIECE + ) as any + + const limitToPart = retimeOp?.limitToCurrentPart ? piece.instance.partInstanceId : undefined + + dragCtx.startDrag( + piece, + timeScale, + { + x: e.clientX, + y: e.clientY, + }, + targetPos.x - e.clientX, + limitToPart, + part.instance.segmentId + ) + } + }, + [piece, timeScale, dragCtx, part] + ) const itemMouseUp = useCallback((e: any) => { const eM = e as MouseEvent if (eM.ctrlKey === true) { @@ -531,29 +565,31 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele ...props, ...state, } + // Key cannot be part of a spread operator, therefore needs to be kept out of elProps + const elKey = unprotectString(piece.instance._id) switch (layer.type) { case SourceLayerType.SCRIPT: // case SourceLayerType.MIC: - return + return case SourceLayerType.VT: case SourceLayerType.LIVE_SPEAK: - return + return case SourceLayerType.GRAPHICS: case SourceLayerType.LOWER_THIRD: case SourceLayerType.STUDIO_SCREEN: - return + return case SourceLayerType.SPLITS: - return + return case SourceLayerType.TRANSITION: // TODOSYNC: TV2 uses other renderers, to be discussed. 
- return + return case SourceLayerType.LOCAL: - return + return default: - return + return } } @@ -575,8 +611,10 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele layer.type, part.partId, highlight, - elementWidth + elementWidth, // this.state + undefined, + hasDraggableElement && dragCtx?.enabled )} data-obj-id={piece.instance._id} ref={setRef} diff --git a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx index 82d0408991..738359eac7 100644 --- a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx +++ b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx @@ -23,38 +23,55 @@ export const DevicePackageManagerSettings: React.FC PeripheralDevices.findOne(deviceId), [deviceId], undefined) - const reloadingNow = useRef(false) + const reloadingNow = useRef(null) const [status, setStatus] = useState(undefined) - const reloadStatus = useCallback((silent = false) => { - if (reloadingNow.current) return // if there is a method currently being executed, skip + const reloadStatus = useCallback( + (silent = false) => { + if (reloadingNow.current === deviceId) return // if there is a method currently being executed, skip - reloadingNow.current = true + reloadingNow.current = deviceId - MeteorCall.client - .callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus') - .then((result: Status) => setStatus(result)) - .catch((error) => { - if (silent) { - logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error) - return - } + MeteorCall.client + .callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus') + .then((result: Status) => { + if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort - doModalDialog({ - message: t('There was an error: {{error}}', { error: error.toString() }), - title: t('Error'), - warning: true, - onAccept: () => { - // Do nothing - }, + setStatus(result) }) - }) - .finally(() => { - reloadingNow.current = false - }) - }, []) + .catch((error) => { + if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort + + if (silent) { + logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error) + return + } + + doModalDialog({ + message: t('There was an error: {{error}}', { error: error.toString() }), + title: t('Error'), + warning: true, + onAccept: () => { + // Do nothing + }, + }) + }) + .finally(() => { + if (reloadingNow.current === deviceId) { + reloadingNow.current = null + } + }) + }, + [deviceId] + ) useEffect(() => { + // Clear cached status when deviceId changes + setStatus(undefined) + + // Trigger a load now + reloadStatus(true) + const reloadInterval = Meteor.setInterval(() => { if (deviceId) { reloadStatus(true) @@ -64,7 +81,7 @@ export const DevicePackageManagerSettings: React.FC { Meteor.clearInterval(reloadInterval) } - }, []) + }, [deviceId, reloadStatus]) function killApp(e: string, appId: string) { MeteorCall.client diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx index 1d457cdd02..1ad2c23f61 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx +++ 
b/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx @@ -93,6 +93,17 @@ function getArguments(t: TFunction, action: SomeAction): string[] { assertNever(action.state) } break + case ClientActions.editMode: + if (action.state === true) { + result.push(t('Enable')) + } else if (action.state === false) { + result.push(t('Disable')) + } else if (action.state === 'toggle') { + result.push(t('Toggle')) + } else { + assertNever(action.state) + } + break case ClientActions.goToOnAirLine: break case ClientActions.rewindSegments: @@ -147,6 +158,8 @@ function hasArguments(action: SomeAction): boolean { return false case ClientActions.shelf: return true + case ClientActions.editMode: + return true case ClientActions.goToOnAirLine: return false case ClientActions.rewindSegments: @@ -193,6 +206,8 @@ function actionToLabel(t: TFunction, action: SomeAction['action']): string { return t('Switch Route Set') case ClientActions.shelf: return t('Shelf') + case ClientActions.editMode: + return t('Edit Mode') case ClientActions.rewindSegments: return t('Rewind Segments to start') case ClientActions.goToOnAirLine: @@ -376,6 +391,40 @@ function getActionParametersEditor( />
) + case ClientActions.editMode: + return ( +
+
+
+						classNames="input text-input input-m"
+						value={action.state}
+						// placeholder={t('State')}
+						options={[
+							{
+								name: t('Enable'),
+								value: true,
+								i: 0,
+							},
+							{
+								name: t('Disable'),
+								value: false,
+								i: 1,
+							},
+							{
+								name: t('Toggle'),
+								value: 'toggle',
+								i: 2,
+							},
+						]}
+						handleUpdate={(newVal) => {
+							onChange({
+								...action,
+								state: newVal,
+							})
+						}}
+					/>
+
+ ) case ClientActions.goToOnAirLine: return null case ClientActions.rewindSegments: diff --git a/packages/webui/src/client/ui/Shelf/AdLibPanel.tsx b/packages/webui/src/client/ui/Shelf/AdLibPanel.tsx index 006cf8083e..d350a1a0f3 100644 --- a/packages/webui/src/client/ui/Shelf/AdLibPanel.tsx +++ b/packages/webui/src/client/ui/Shelf/AdLibPanel.tsx @@ -128,6 +128,7 @@ function actionToAdLibPieceUi( uniquenessId: action.display.uniquenessId, lifespan: PieceLifespan.WithinPart, // value doesn't matter expectedPackages: action.expectedPackages, + invalid: action.invalid, }) } diff --git a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx index 6675919fef..8b5180dac6 100644 --- a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx +++ b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx @@ -25,7 +25,7 @@ export const PackageStatus: React.FC<{ const { t } = useTranslation() const getPackageName = useCallback((): string => { - const p2: ExpectedPackage.Any = props.package as any + const p2 = props.package.package as ExpectedPackage.Any if (p2.type === ExpectedPackage.PackageType.MEDIA_FILE) { return p2.content.filePath || unprotectString(props.package._id) } else if (p2.type === ExpectedPackage.PackageType.QUANTEL_CLIP) { @@ -87,7 +87,7 @@ export const PackageStatus: React.FC<{ return 0 }) - }, props.statuses) + }, [props.statuses]) let offlineReasonMessage: string | undefined = undefined let connected = true diff --git a/packages/webui/src/client/ui/Status/package-status/index.tsx b/packages/webui/src/client/ui/Status/package-status/index.tsx index 5c9baa0d6f..4fea26d793 100644 --- a/packages/webui/src/client/ui/Status/package-status/index.tsx +++ b/packages/webui/src/client/ui/Status/package-status/index.tsx @@ -68,11 +68,6 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta ) } function renderExpectedPackageStatuses() { - const packageRef: { [packageId: string]: ExpectedPackageDB } = {} - for (const expPackage of expectedPackages) { - packageRef[unprotectString(expPackage._id)] = expPackage - } - const packagesWithWorkStatuses: { [packageId: string]: { package: ExpectedPackageDB | undefined @@ -80,20 +75,33 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta device: PeripheralDevice | undefined } } = {} + + for (const expPackage of expectedPackages) { + packagesWithWorkStatuses[unprotectString(expPackage._id)] = { + package: expPackage, + statuses: [], + device: undefined, + } + } + for (const work of expectedPackageWorkStatuses) { - const device = peripheralDevicesMap.get(work.deviceId) // todo: make this better: - const key = unprotectString(work.fromPackages[0]?.id) || 'unknown_work_' + work._id - // const referencedPackage = packageRef[packageId] - let packageWithWorkStatus = packagesWithWorkStatuses[key] - if (!packageWithWorkStatus) { - packagesWithWorkStatuses[key] = packageWithWorkStatus = { - package: packageRef[key] || undefined, - statuses: [], - device, + let fromPackageIds = work.fromPackages.map((p) => unprotectString(p.id)) + if (fromPackageIds.length === 0) fromPackageIds = ['unknown_work_' + work._id] + + for (const key of fromPackageIds) { + // const referencedPackage = packageRef[packageId] + let packageWithWorkStatus = packagesWithWorkStatuses[key] + if (!packageWithWorkStatus) { + packagesWithWorkStatuses[key] = packageWithWorkStatus = { + package: undefined, + statuses: [], + device: 
undefined,
+					}
 				}
+				packageWithWorkStatus.statuses.push(work)
+				packageWithWorkStatus.device = peripheralDevicesMap.get(work.deviceId)
 			}
-			packageWithWorkStatus.statuses.push(work)
 		}

 		for (const id of Object.keys(packagesWithWorkStatuses)) {
`--ignore ${pkg}`));
-  }
-
  await concurrently(
    [
      {
-        command: `yarn build:try ${buildArgs.join(" ")}`,
+        command: `yarn build:try`,
        cwd: "packages",
        name: "PACKAGES-BUILD",
        prefixColor: "yellow",
diff --git a/scripts/lib.js b/scripts/lib.js
index 2c3b316bfa..58376c572a 100644
--- a/scripts/lib.js
+++ b/scripts/lib.js
@@ -1,9 +1,3 @@
-/** These are extra packages in the mono-repo, not necessary for Sofie Core development */
-const EXTRA_PACKAGES = [
-  "@sofie-automation/openapi",
-  "live-status-gateway",
-  "mos-gateway",
-];
 const args = process.argv.slice(2);

 const config = {
@@ -13,6 +7,5 @@
 };

 module.exports = {
-  EXTRA_PACKAGES,
   config,
 };
diff --git a/scripts/run.mjs b/scripts/run.mjs
index 578b3f069a..a2a65ff3c6 100644
--- a/scripts/run.mjs
+++ b/scripts/run.mjs
@@ -1,7 +1,7 @@
 import process from "process";
 import fs from "fs";
 import concurrently from "concurrently";
-import { EXTRA_PACKAGES, config } from "./lib.js";
+import { config } from "./lib.js";

 function joinCommand(...parts) {
   return parts.filter((part) => !!part).join(" ");
@@ -10,15 +10,9 @@ function joinCommand(...parts) {
 function watchPackages() {
   return [
     {
-      command: joinCommand('yarn watch',
-        config.uiOnly
-          ? EXTRA_PACKAGES.map((pkg) => `--ignore ${pkg}`).join(
-              " "
-            )
-          : "",
-      ),
+      command: 'yarn watch --preserveWatchOutput',
       cwd: "packages",
-      name: "PACKAGES-TSC",
+      name: "TSC",
       prefixColor: "red",
     },
   ];
@@ -47,12 +41,6 @@ function watchMeteor() {
   const rootUrl = process.env.ROOT_URL ? new URL(process.env.ROOT_URL) : null

   return [
-    {
-      command: "yarn watch-types --preserveWatchOutput",
-      cwd: "meteor",
-      name: "METEOR-TSC",
-      prefixColor: "blue",
-    },
     {
       command: joinCommand(
         'yarn debug',
diff --git a/tsconfig.json b/tsconfig.json
index de82c14f25..e75f73f213 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,3 +1,3 @@
 {
-	"extends": "./meteor/tsconfig.json"
+	"extends": "./packages/tsconfig.json"
 }