diff --git a/meteor/__mocks__/defaultCollectionObjects.ts b/meteor/__mocks__/defaultCollectionObjects.ts
index fa8a8934ed..faec0a06be 100644
--- a/meteor/__mocks__/defaultCollectionObjects.ts
+++ b/meteor/__mocks__/defaultCollectionObjects.ts
@@ -52,6 +52,11 @@ export function defaultRundownPlaylist(_id: RundownPlaylistId, studioId: StudioI
 			type: 'none' as any,
 		},
 		rundownIdsInOrder: [],
+		tTimers: [
+			{ index: 1, label: '', mode: null },
+			{ index: 2, label: '', mode: null },
+			{ index: 3, label: '', mode: null },
+		],
 	}
 }
 export function defaultRundown(
diff --git a/meteor/server/__tests__/cronjobs.test.ts b/meteor/server/__tests__/cronjobs.test.ts
index c61e36bdcb..9133eb4439 100644
--- a/meteor/server/__tests__/cronjobs.test.ts
+++ b/meteor/server/__tests__/cronjobs.test.ts
@@ -618,6 +618,7 @@ describe('cronjobs', () => {
 				type: PlaylistTimingType.None,
 			},
 			activationId: protectString(''),
+			tTimers: [] as any,
 		})
 
 		return {
diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts
index 948f8eefb9..d27bded17c 100644
--- a/meteor/server/api/__tests__/cleanup.test.ts
+++ b/meteor/server/api/__tests__/cleanup.test.ts
@@ -203,7 +203,7 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) {
 		startSegmentId: segmentId,
 		timelineObjectsString: '' as any,
 	}
-	const pieceId = await Pieces.mutableCollection.insertAsync(piece)
+	await Pieces.mutableCollection.insertAsync(piece)
 
 	await AdLibActions.mutableCollection.insertAsync({
 		_id: getRandomId(),
@@ -265,22 +265,15 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) {
 	})
 	const packageId = await ExpectedPackages.mutableCollection.insertAsync({
 		_id: getRandomId(),
-		blueprintPackageId: '',
-		// @ts-expect-error bucketId is not a part of all ExpectedPackageDBs
-		bucketId,
-		content: {} as any,
-		contentVersionHash: '',
-		created: 0,
-		fromPieceType: '' as any,
-		layers: [],
-		pieceId,
-		rundownId,
-		segmentId,
-		sideEffect: {} as any,
 		studioId,
-		sources: {} as any,
-		type: '' as any,
-		version: {} as any,
+		rundownId,
+		bucketId: null,
+		created: 0,
+		package: {} as any,
+		ingestSources: [],
+		playoutSources: {
+			pieceInstanceIds: [],
+		},
 	})
 	await ExpectedPackageWorkStatuses.insertAsync({
 		_id: getRandomId(),
diff --git a/meteor/server/api/__tests__/externalMessageQueue.test.ts b/meteor/server/api/__tests__/externalMessageQueue.test.ts
index 801220a8f8..1b5fb53f93 100644
--- a/meteor/server/api/__tests__/externalMessageQueue.test.ts
+++ b/meteor/server/api/__tests__/externalMessageQueue.test.ts
@@ -41,6 +41,7 @@ describe('Test external message queue static methods', () => {
 				type: PlaylistTimingType.None,
 			},
 			rundownIdsInOrder: [protectString('rundown_1')],
+			tTimers: [] as any,
 		})
 		await Rundowns.mutableCollection.insertAsync({
 			_id: protectString('rundown_1'),
diff --git a/meteor/server/api/__tests__/peripheralDevice.test.ts b/meteor/server/api/__tests__/peripheralDevice.test.ts
index 3c819cf20a..594c44049c 100644
--- a/meteor/server/api/__tests__/peripheralDevice.test.ts
+++ b/meteor/server/api/__tests__/peripheralDevice.test.ts
@@ -78,6 +78,7 @@ describe('test peripheralDevice general API methods', () => {
 				type: PlaylistTimingType.None,
 			},
 			rundownIdsInOrder: [rundownID],
+			tTimers: [] as any,
 		})
 		await Rundowns.mutableCollection.insertAsync({
 			_id: rundownID,
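The mocks and tests above seed `tTimers` with either an empty array or three default entries. A minimal sketch of the entry shape those literals imply (the real type lives in corelib; the `mode` type here is an assumption):

```ts
// Hypothetical sketch of a tTimer entry, inferred from the literals above.
// The authoritative type is defined in @sofie-automation/corelib.
interface TTimerEntrySketch {
	index: number // 1-based slot; the defaults seed slots 1-3
	label: string // user-facing name, empty until configured
	mode: string | null // counting mode (assumed type); null = not running
}

const defaultTTimers: TTimerEntrySketch[] = [1, 2, 3].map((index) => ({ index, label: '', mode: null }))
```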
diff --git a/meteor/server/api/ingest/debug.ts b/meteor/server/api/ingest/debug.ts
index 37c2947233..160d62070b 100644
--- a/meteor/server/api/ingest/debug.ts
+++ b/meteor/server/api/ingest/debug.ts
@@ -8,7 +8,6 @@ import { QueueStudioJob } from '../../worker/worker'
 import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio'
 import { RundownPlaylistId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { MeteorDebugMethods } from '../../methods'
-import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
 
 MeteorDebugMethods({
 	/**
@@ -47,31 +46,4 @@ MeteorDebugMethods({
 			segmentExternalId: segment.externalId,
 		})
 	},
-	/**
-	 * Regenerate all the expected packages for all rundowns in the system.
-	 * Additionally it will recreate any expectedPlayoutItems.
-	 * This shouldn't be necessary as ingest will do this for each rundown as part of its workflow
-	 */
-	debug_recreateExpectedPackages: async () => {
-		const rundowns = (await Rundowns.findFetchAsync(
-			{},
-			{
-				projection: {
-					_id: 1,
-					studioId: 1,
-					source: 1,
-				},
-			}
-		)) as Array<Pick<DBRundown, '_id' | 'studioId' | 'source'>>
-
-		await Promise.all(
-			rundowns
-				.filter((rundown) => rundown.source.type !== 'snapshot')
-				.map(async (rundown) =>
-					runIngestOperation(rundown.studioId, IngestJobs.ExpectedPackagesRegenerate, {
-						rundownId: rundown._id,
-					})
-				)
-		)
-	},
 })
diff --git a/meteor/server/api/ingest/packageInfo.ts b/meteor/server/api/ingest/packageInfo.ts
index 24fe870235..06c19dfa59 100644
--- a/meteor/server/api/ingest/packageInfo.ts
+++ b/meteor/server/api/ingest/packageInfo.ts
@@ -1,10 +1,7 @@
 import {
-	ExpectedPackageDBFromBucketAdLib,
-	ExpectedPackageDBFromBucketAdLibAction,
-	ExpectedPackageDBFromStudioBaselineObjects,
 	ExpectedPackageDBType,
-	ExpectedPackageFromRundown,
-	ExpectedPackageFromRundownBaseline,
+	ExpectedPackageDB,
+	ExpectedPackageIngestSourceBucket,
 } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos'
 import { ExpectedPackages, Rundowns } from '../../collections'
@@ -28,8 +25,10 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P
 		return
 	}
 
-	if (pkg.listenToPackageInfoUpdates) {
-		switch (pkg.fromPieceType) {
+	for (const source of pkg.ingestSources) {
+		if (!source.listenToPackageInfoUpdates) continue
+
+		switch (source.fromPieceType) {
 			case ExpectedPackageDBType.PIECE:
 			case ExpectedPackageDBType.ADLIB_PIECE:
 			case ExpectedPackageDBType.ADLIB_ACTION:
@@ -41,39 +40,44 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P
 				break
 			case ExpectedPackageDBType.BUCKET_ADLIB:
 			case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
-				onUpdatedPackageInfoForBucketItemDebounce(pkg)
+				onUpdatedPackageInfoForBucketItemDebounce(pkg, source)
 				break
 			case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
 				onUpdatedPackageInfoForStudioBaselineDebounce(pkg)
 				break
 			default:
-				assertNever(pkg)
+				assertNever(source)
 				break
 		}
 	}
 }
 
 const pendingRundownPackageUpdates = new Map<RundownId, Array<ExpectedPackageId>>()
-function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageFromRundown | ExpectedPackageFromRundownBaseline) {
-	const existingEntry = pendingRundownPackageUpdates.get(pkg.rundownId)
+function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageDB) {
+	if (!pkg.rundownId) {
+		logger.error(`Updating ExpectedPackage "${pkg._id}" not possible: missing rundownId`)
+		return
+	}
+
+	const rundownId = pkg.rundownId
+
+	const existingEntry = pendingRundownPackageUpdates.get(rundownId)
 	if (existingEntry) {
 		// already queued, add to the batch
 		existingEntry.push(pkg._id)
 	} else {
-		pendingRundownPackageUpdates.set(pkg.rundownId, [pkg._id])
+		pendingRundownPackageUpdates.set(rundownId, [pkg._id])
 	}
 
 	// TODO: Scaling - this won't batch correctly if package manager directs calls to multiple instances
 	lazyIgnore(
-		`onUpdatedPackageInfoForRundown_${pkg.rundownId}`,
+		`onUpdatedPackageInfoForRundown_${rundownId}`,
 		() => {
-			const packageIds = pendingRundownPackageUpdates.get(pkg.rundownId)
+			const packageIds = pendingRundownPackageUpdates.get(rundownId)
 			if (packageIds) {
-				pendingRundownPackageUpdates.delete(pkg.rundownId)
-				onUpdatedPackageInfoForRundown(pkg.rundownId, packageIds).catch((e) => {
-					logger.error(
-						`Updating ExpectedPackages for Rundown "${pkg.rundownId}" failed: ${stringifyError(e)}`
-					)
+				pendingRundownPackageUpdates.delete(rundownId)
+				onUpdatedPackageInfoForRundown(rundownId, packageIds).catch((e) => {
+					logger.error(`Updating ExpectedPackages for Rundown "${rundownId}" failed: ${stringifyError(e)}`)
 				})
 			}
 		},
@@ -108,19 +112,24 @@ async function onUpdatedPackageInfoForRundown(
 	})
 }
 
-function onUpdatedPackageInfoForBucketItemDebounce(
-	pkg: ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction
-) {
+function onUpdatedPackageInfoForBucketItemDebounce(pkg: ExpectedPackageDB, source: ExpectedPackageIngestSourceBucket) {
+	if (!pkg.bucketId) {
+		logger.error(`Updating ExpectedPackage "${pkg._id}" not possible: missing bucketId`)
+		return
+	}
+
+	const bucketId = pkg.bucketId
+
 	lazyIgnore(
-		`onUpdatedPackageInfoForBucket_${pkg.studioId}_${pkg.bucketId}_${pkg.pieceExternalId}`,
+		`onUpdatedPackageInfoForBucket_${pkg.studioId}_${bucketId}_${source.pieceExternalId}`,
 		() => {
 			runIngestOperation(pkg.studioId, IngestJobs.BucketItemRegenerate, {
-				bucketId: pkg.bucketId,
-				externalId: pkg.pieceExternalId,
+				bucketId: bucketId,
+				externalId: source.pieceExternalId,
 			}).catch((err) => {
 				logger.error(
-					`Updating ExpectedPackages for Bucket "${pkg.bucketId}" Item "${
-						pkg.pieceExternalId
+					`Updating ExpectedPackages for Bucket "${bucketId}" Item "${
+						source.pieceExternalId
 					}" failed: ${stringifyError(err)}`
 				)
 			})
@@ -129,7 +138,7 @@ function onUpdatedPackageInfoForBucketItemDebounce(
 	)
 }
 
-function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDBFromStudioBaselineObjects) {
+function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDB) {
 	lazyIgnore(
 		`onUpdatedPackageInfoForStudioBaseline_${pkg.studioId}`,
 		() => {
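The rundown path above batches per `rundownId`: ids accumulate in `pendingRundownPackageUpdates` while `lazyIgnore` holds off the flush, so one ingest operation drains the whole batch. A distilled sketch of that debounce-and-drain pattern with generic names (not the project's API):

```ts
// Generic batch-and-debounce: collect ids per key, flush once after the quiet
// period. Mirrors the pendingRundownPackageUpdates + lazyIgnore combination.
const pending = new Map<string, string[]>()
const timers = new Map<string, ReturnType<typeof setTimeout>>()

function queueUpdate(key: string, id: string, flush: (ids: string[]) => void, waitMs = 1000): void {
	const batch = pending.get(key)
	if (batch) batch.push(id) // already queued, add to the batch
	else pending.set(key, [id])

	// Only the first call within the window schedules a flush; later calls are ignored
	if (!timers.has(key)) {
		timers.set(
			key,
			setTimeout(() => {
				timers.delete(key)
				const ids = pending.get(key)
				pending.delete(key)
				if (ids) flush(ids)
			}, waitMs)
		)
	}
}
```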
diff --git a/meteor/server/api/integration/expectedPackages.ts b/meteor/server/api/integration/expectedPackages.ts
index 42f98da9c3..f6d99d242c 100644
--- a/meteor/server/api/integration/expectedPackages.ts
+++ b/meteor/server/api/integration/expectedPackages.ts
@@ -34,6 +34,7 @@ import {
 } from '../../collections'
 import { logger } from '../../logging'
 import _ from 'underscore'
+import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 
 export namespace PackageManagerIntegration {
 	export async function updateExpectedPackageWorkStatuses(
@@ -99,9 +100,17 @@ export namespace PackageManagerIntegration {
 				const fromPackageIds = workStatus.fromPackages.map((p) => p.id)
 				if (fromPackageIds.length) {
 					ps.push(
-						ExpectedPackages.findOneAsync({
-							_id: { $in: fromPackageIds },
-						}).then((expPackage) => {
+						ExpectedPackages.findOneAsync(
+							{
+								_id: { $in: fromPackageIds },
+							},
+							{
+								projection: {
+									_id: 1,
+									studioId: 1,
+								},
+							}
+						).then((expPackage: Pick<ExpectedPackageDB, '_id' | 'studioId'> | undefined) => {
 							if (!expPackage)
 								throw new Meteor.Error(404, `ExpectedPackages "${fromPackageIds}" not found`)
diff --git a/meteor/server/collections/packages-media.ts b/meteor/server/collections/packages-media.ts
index f004da7555..2685562924 100644
--- a/meteor/server/collections/packages-media.ts
+++ b/meteor/server/collections/packages-media.ts
@@ -14,15 +14,17 @@ export const ExpectedPackages = createAsyncOnlyReadOnlyMongoCollection(
diff --git a/meteor/server/migration/1_50_0.ts b/meteor/server/migration/1_50_0.ts
index 08a6be47d8..cb194d8c5e 100644
--- a/meteor/server/migration/1_50_0.ts
+++ b/meteor/server/migration/1_50_0.ts
@@ -33,7 +33,6 @@ import { JSONBlobStringify, JSONSchema, TSR } from '@sofie-automation/blueprints
 import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants'
 import { PartId } from '@sofie-automation/shared-lib/dist/core/model/Ids'
 import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedString'
-import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import {
 	AdLibActionId,
 	BucketAdLibActionId,
@@ -44,6 +43,7 @@ import {
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
+import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52'
 
 // Release 50
@@ -161,9 +161,9 @@ const oldDeviceTypeToNewMapping = {
 }
 
 const EXPECTED_PACKAGE_TYPES_ADDED_PART_ID = [
-	ExpectedPackageDBType.PIECE,
-	ExpectedPackageDBType.ADLIB_PIECE,
-	ExpectedPackageDBType.ADLIB_ACTION,
+	PackagesPreR53.ExpectedPackageDBType.PIECE,
+	PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE,
+	PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION,
 ]
 
 export const addSteps = addMigrationSteps('1.50.0', [
@@ -875,10 +875,10 @@ export const addSteps = addMigrationSteps('1.50.0', [
 			return false
 		},
 		migrate: async () => {
-			const objects = await ExpectedPackages.findFetchAsync({
+			const objects = (await ExpectedPackages.findFetchAsync({
 				fromPieceType: { $in: EXPECTED_PACKAGE_TYPES_ADDED_PART_ID as any }, // Force the types, as the query does not match due to the interfaces
 				partId: { $exists: false },
-			})
+			})) as unknown as Array<PackagesPreR53.ExpectedPackageDB>
 
 			const neededPieceIds: Array<
 				PieceId | AdLibActionId | RundownBaselineAdLibActionId | BucketAdLibId | BucketAdLibActionId
diff --git a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts
index 7c7cef98e2..7056e6c1e6 100644
--- a/meteor/server/migration/X_X_X.ts
+++ b/meteor/server/migration/X_X_X.ts
@@ -1,7 +1,15 @@
 import { addMigrationSteps } from './databaseMigration'
 import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion'
 import { MongoInternals } from 'meteor/mongo'
-import { Studios } from '../collections'
+import { RundownPlaylists, Studios } from '../collections'
+import { ExpectedPackages } from '../collections'
+import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52'
+import {
+	ExpectedPackageDB,
+	ExpectedPackageIngestSource,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { BucketId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { assertNever, Complete } from '@sofie-automation/corelib/dist/lib'
 
 /*
  * **************************************************************************************
@@ -59,4 +67,156 @@ export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [
 			// Do nothing, the user will have to resolve this manually
 		},
 	},
+	{
+		id: `convert ExpectedPackages to new format`,
+		canBeRunAutomatically: true,
+		validate: async () => {
+			const packages = await ExpectedPackages.findFetchAsync({
+				fromPieceType: { $exists: true },
+			})
+
+			if (packages.length > 0) {
+				return 'ExpectedPackages must be converted to new format'
+			}
+
+			return false
+		},
+		migrate: async () => {
+			const packages = (await ExpectedPackages.findFetchAsync({
+				fromPieceType: { $exists: true },
+			})) as unknown as PackagesPreR53.ExpectedPackageDB[]
+
+			for (const pkg of packages) {
+				let rundownId: RundownId | null = null
+				let bucketId: BucketId | null = null
+				let ingestSource: ExpectedPackageIngestSource | undefined
+
+				switch (pkg.fromPieceType) {
+					case PackagesPreR53.ExpectedPackageDBType.PIECE:
+					case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							partId: pkg.partId,
+							segmentId: pkg.segmentId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							partId: pkg.partId,
+							segmentId: pkg.segmentId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS:
+						rundownId = pkg.rundownId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB:
+						bucketId = pkg.bucketId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							pieceExternalId: pkg.pieceExternalId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+						bucketId = pkg.bucketId
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							pieceId: pkg.pieceId,
+							pieceExternalId: pkg.pieceExternalId,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
+						ingestSource = {
+							fromPieceType: pkg.fromPieceType,
+							blueprintPackageId: pkg.blueprintPackageId,
+							listenToPackageInfoUpdates: pkg.listenToPackageInfoUpdates,
+						}
+						break
+					default:
+						assertNever(pkg)
+						break
+				}
+
+				await ExpectedPackages.mutableCollection.removeAsync(pkg._id)
+
+				if (ingestSource) {
+					await ExpectedPackages.mutableCollection.insertAsync({
+						_id: pkg._id, // Preserve the old id to ensure references aren't broken. This will be 'corrected' upon first ingest operation
+						studioId: pkg.studioId,
+						rundownId: rundownId,
+						bucketId: bucketId,
+						package: {
+							...(pkg as any), // Some fields should be pruned off this, but this is fine
+							_id: pkg.blueprintPackageId,
+						},
+						created: pkg.created,
+						ingestSources: [ingestSource],
+						playoutSources: {
+							pieceInstanceIds: [],
+						},
+					} satisfies Complete<ExpectedPackageDB>)
+				}
+			}
+		},
+	},
+	{
+		id: 'Add T-timers to RundownPlaylist',
+		canBeRunAutomatically: true,
+		validate: async () => {
+			const playlistCount = await RundownPlaylists.countDocuments({ tTimers: { $exists: false } })
+			if (playlistCount > 0) return `There are ${playlistCount} RundownPlaylists without T-timers`
+			return false
+		},
+		migrate: async () => {
+			await RundownPlaylists.mutableCollection.updateAsync(
+				{ tTimers: { $exists: false } },
+				{
+					$set: {
+						tTimers: [
+							{ index: 1, label: '', mode: null },
+							{ index: 2, label: '', mode: null },
+							{ index: 3, label: '', mode: null },
+						],
+					},
+				},
+				{ multi: true }
+			)
+		},
+	},
 ])
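The converted documents above target the post-R53 `ExpectedPackageDB` shape. For orientation, a simplified sketch of that shape as implied by the `insertAsync` call (types approximated; the authoritative definition lives in `@sofie-automation/corelib`):

```ts
// Simplified sketch of the post-R53 ExpectedPackageDB document, inferred from
// the migration above. Id types are shown as plain strings for brevity.
interface ExpectedPackageDBSketch {
	_id: string // old id preserved; 'corrected' on the first ingest operation
	studioId: string
	rundownId: string | null // set when the package is owned by a rundown
	bucketId: string | null // set when the package is owned by a bucket
	created: number
	// The blueprint-authored package payload, re-keyed by its blueprintPackageId
	package: { _id: string; [key: string]: unknown }
	// Where the package came from during ingest; one entry per referencing source
	ingestSources: Array<{ fromPieceType: number; blueprintPackageId: string; listenToPackageInfoUpdates?: boolean }>
	// Playout-side references, populated outside this migration
	playoutSources: { pieceInstanceIds: string[] }
}
```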
diff --git a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
index b26af4113d..4d5a6533ed 100644
--- a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts
@@ -3,7 +3,7 @@ import { literal } from '@sofie-automation/corelib/dist/lib'
 import { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo'
 import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
-import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
+import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
 
 export type RundownPlaylistCompact = Pick<
 	DBRundownPlaylist,
@@ -16,27 +16,34 @@ export const rundownPlaylistFieldSpecifier = literal<
-export type PieceInstanceCompact = Pick<PieceInstance, '_id' | 'rundownId'> & {
-	piece: Pick<PieceInstancePiece, 'expectedPackages'>
-}
+export type PieceInstanceCompact = Pick<
+	PieceInstance,
+	'_id' | 'rundownId' | 'partInstanceId' | 'neededExpectedPackageIds'
+>
 
 export const pieceInstanceFieldsSpecifier = literal<MongoFieldSpecifierOnesStrict<PieceInstanceCompact>>({
 	_id: 1,
 	rundownId: 1,
-	piece: {
-		expectedPackages: 1,
-	},
+	partInstanceId: 1,
+	neededExpectedPackageIds: 1,
+})
+
+export type ExpectedPackageDBCompact = Pick<ExpectedPackageDB, '_id' | 'package'>
+
+export const expectedPackageDBFieldsSpecifier = literal<MongoFieldSpecifierOnesStrict<ExpectedPackageDBCompact>>({
+	_id: 1,
+	package: 1,
 })
 
 export interface ExpectedPackagesContentCache {
-	ExpectedPackages: ReactiveCacheCollection<ExpectedPackageDB>
+	ExpectedPackages: ReactiveCacheCollection<ExpectedPackageDBCompact>
 	RundownPlaylists: ReactiveCacheCollection<RundownPlaylistCompact>
 	PieceInstances: ReactiveCacheCollection<PieceInstanceCompact>
 }
 
 export function createReactiveContentCache(): ExpectedPackagesContentCache {
 	const cache: ExpectedPackagesContentCache = {
-		ExpectedPackages: new ReactiveCacheCollection<ExpectedPackageDB>('expectedPackages'),
+		ExpectedPackages: new ReactiveCacheCollection<ExpectedPackageDBCompact>('expectedPackages'),
 		RundownPlaylists: new ReactiveCacheCollection<RundownPlaylistCompact>('rundownPlaylists'),
 		PieceInstances: new ReactiveCacheCollection<PieceInstanceCompact>('pieceInstances'),
 	}
diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts b/meteor/server/publications/packageManager/expectedPackages/generate.ts
index ab39291755..5c815af910 100644
--- a/meteor/server/publications/packageManager/expectedPackages/generate.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts
@@ -1,6 +1,10 @@
-import { PackageContainerOnPackage, Accessor, AccessorOnPackage } from '@sofie-automation/blueprints-integration'
-import { getContentVersionHash, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { PeripheralDeviceId, ExpectedPackageId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	PackageContainerOnPackage,
+	Accessor,
+	AccessorOnPackage,
+	ExpectedPackage,
+} from '@sofie-automation/blueprints-integration'
+import { PeripheralDeviceId, ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import {
 	PackageManagerExpectedPackage,
@@ -15,7 +19,7 @@ import { DBStudio, StudioLight, StudioPackageContainer } from '@sofie-automation
 import { clone, omit } from '@sofie-automation/corelib/dist/lib'
 import { CustomPublishCollection } from '../../../lib/customPublication'
 import { logger } from '../../../logging'
-import { ExpectedPackagesContentCache } from './contentCache'
+import { ExpectedPackageDBCompact, ExpectedPackagesContentCache } from './contentCache'
 import type { StudioFields } from './publication'
 
 /**
@@ -48,7 +52,7 @@ export async function updateCollectionForExpectedPackageIds(
 		// Map the expectedPackages onto their specified layer:
 		const allDeviceIds = new Set()
-		for (const layerName of packageDoc.layers) {
+		for (const layerName of packageDoc.package.layers) {
 			const layerDeviceIds = layerNameToDeviceIds.get(layerName)
 			for (const deviceId of layerDeviceIds || []) {
 				allDeviceIds.add(deviceId)
 			}
 		}
@@ -59,17 +63,7 @@ export async function updateCollectionForExpectedPackageIds(
 			// Filter, keep only the routed mappings for this device:
 			if (filterPlayoutDeviceIds && !filterPlayoutDeviceIds.includes(deviceId)) continue
 
-			const routedPackage = generateExpectedPackageForDevice(
-				studio,
-				{
-					...packageDoc,
-					_id: unprotectString(packageDoc._id),
-				},
-				deviceId,
-				null,
-				Priorities.OTHER, // low priority
-				packageContainers
-			)
+			const routedPackage = generateExpectedPackageForDevice(studio, packageDoc, deviceId, packageContainers)
 
 			updatedDocIds.add(routedPackage._id)
 			collection.replace(routedPackage)
@@ -78,118 +72,27 @@ export async function updateCollectionForExpectedPackageIds(
 	// Remove all documents for an ExpectedPackage that was regenerated, and no update was issues
 	collection.remove((doc) => {
-		if (doc.pieceInstanceId) return false
-
-		if (missingExpectedPackageIds.has(protectString(doc.expectedPackage._id))) return true
-
-		if (updatedDocIds.has(doc._id) && !regenerateIds.has(protectString(doc.expectedPackage._id))) return true
-
-		return false
-	})
-}
-
-/**
- * Regenerate the output for the provided PieceInstance `regenerateIds`, updating the data in `collection` as needed
- * @param contentCache Cache of the database documents used
- * @param studio Minimal studio document
- * @param layerNameToDeviceIds Lookup table of package layers, to PeripheralDeviceIds the layer could be used with
- * @param collection Output collection of the publication
- * @param filterPlayoutDeviceIds PeripheralDeviceId filter applied to this publication
- * @param regenerateIds Ids of PieceInstance documents to be recalculated
- */
-export async function updateCollectionForPieceInstanceIds(
-	contentCache: ReadonlyDeep<ExpectedPackagesContentCache>,
-	studio: Pick<DBStudio, StudioFields>,
-	layerNameToDeviceIds: Map<string, PeripheralDeviceId[]>,
-	packageContainers: Record<string, StudioPackageContainer>,
-	collection: CustomPublishCollection<PackageManagerExpectedPackage>,
-	filterPlayoutDeviceIds: ReadonlyDeep<PeripheralDeviceId[]> | undefined,
-	regenerateIds: Set<PieceInstanceId>
-): Promise<void> {
-	const updatedDocIds = new Set()
-	const missingPieceInstanceIds = new Set()
-
-	for (const pieceInstanceId of regenerateIds) {
-		const pieceInstanceDoc = contentCache.PieceInstances.findOne(pieceInstanceId)
-		if (!pieceInstanceDoc) {
-			missingPieceInstanceIds.add(pieceInstanceId)
-			continue
-		}
-		if (!pieceInstanceDoc.piece?.expectedPackages) continue
-
-		pieceInstanceDoc.piece.expectedPackages.forEach((expectedPackage, i) => {
-			const sanitisedPackageId = getExpectedPackageId(pieceInstanceId, expectedPackage._id || '__unnamed' + i)
-
-			// Map the expectedPackages onto their specified layer:
-			const allDeviceIds = new Set()
-			for (const layerName of expectedPackage.layers) {
-				const layerDeviceIds = layerNameToDeviceIds.get(layerName)
-				for (const deviceId of layerDeviceIds || []) {
-					allDeviceIds.add(deviceId)
-				}
-			}
-
-			for (const deviceId of allDeviceIds) {
-				// Filter, keep only the routed mappings for this device:
-				if (filterPlayoutDeviceIds && !filterPlayoutDeviceIds.includes(deviceId)) continue
-
-				const routedPackage = generateExpectedPackageForDevice(
-					studio,
-					{
-						...expectedPackage,
-						_id: unprotectString(sanitisedPackageId),
-						rundownId: pieceInstanceDoc.rundownId,
-						contentVersionHash: getContentVersionHash(expectedPackage),
-					},
-					deviceId,
-					pieceInstanceId,
-					Priorities.OTHER, // low priority
-					packageContainers
-				)
-
-				updatedDocIds.add(routedPackage._id)
-				collection.replace(routedPackage)
-			}
-		})
-	}
-
-	// Remove all documents for an ExpectedPackage that was regenerated, and no update was issues
-	collection.remove((doc) => {
-		if (!doc.pieceInstanceId) return false
-
-		if (missingPieceInstanceIds.has(doc.pieceInstanceId)) return true
+		if (missingExpectedPackageIds.has(doc.expectedPackage._id)) return true
 
-		if (updatedDocIds.has(doc._id) && !regenerateIds.has(doc.pieceInstanceId)) return true
+		if (updatedDocIds.has(doc._id) && !regenerateIds.has(doc.expectedPackage._id)) return true
 
 		return false
 	})
 }
 
-enum Priorities {
-	// Lower priorities are done first
-
-	/** Highest priority */
-	PLAYOUT_CURRENT = 0,
-	/** Second-to-highest priority */
-	PLAYOUT_NEXT = 1,
-	OTHER = 9,
-}
-
 function generateExpectedPackageForDevice(
 	studio: Pick<
 		StudioLight,
 		'_id' | 'packageContainersWithOverrides' | 'previewContainerIds' | 'thumbnailContainerIds'
 	>,
-	expectedPackage: PackageManagerExpectedPackageBase,
+	expectedPackage: ExpectedPackageDBCompact,
 	deviceId: PeripheralDeviceId,
-	pieceInstanceId: PieceInstanceId | null,
-	priority: Priorities,
 	packageContainers: Record<string, StudioPackageContainer>
 ): PackageManagerExpectedPackage {
 	// Lookup Package sources:
 	const combinedSources: PackageContainerOnPackage[] = []
-	for (const packageSource of expectedPackage.sources) {
+	for (const packageSource of expectedPackage.package.sources) {
 		const lookedUpSource = packageContainers[packageSource.containerId]
 		if (lookedUpSource) {
 			combinedSources.push(calculateCombinedSource(packageSource, lookedUpSource))
@@ -207,24 +110,27 @@ function generateExpectedPackageForDevice(
 	}
 
 	// Lookup Package targets:
-	const combinedTargets = calculateCombinedTargets(expectedPackage, deviceId, packageContainers)
+	const combinedTargets = calculateCombinedTargets(expectedPackage.package, deviceId, packageContainers)
 
-	if (!combinedSources.length && expectedPackage.sources.length !== 0) {
+	if (!combinedSources.length && expectedPackage.package.sources.length !== 0) {
 		logger.warn(`Pub.expectedPackagesForDevice: No sources found for "${expectedPackage._id}"`)
 	}
 	if (!combinedTargets.length) {
"${expectedPackage._id}"`) } - expectedPackage.sideEffect = getSideEffect(expectedPackage, studio) + const packageSideEffect = getSideEffect(expectedPackage.package, studio) return { - _id: protectString(`${expectedPackage._id}_${deviceId}_${pieceInstanceId}`), - expectedPackage: expectedPackage, + _id: protectString(`${expectedPackage._id}_${deviceId}`), + expectedPackage: { + ...expectedPackage.package, + _id: expectedPackage._id, + sideEffect: packageSideEffect, + }, sources: combinedSources, targets: combinedTargets, - priority: priority, + priority: null, playoutDeviceId: deviceId, - pieceInstanceId, } } @@ -247,7 +153,7 @@ function calculateCombinedSource( for (const accessorId of accessorIds) { const sourceAccessor: Accessor.Any | undefined = lookedUpSource.container.accessors[accessorId] - const packageAccessor: AccessorOnPackage.Any | undefined = packageSource.accessors?.[accessorId] + const packageAccessor: ReadonlyDeep | undefined = packageSource.accessors?.[accessorId] if (packageAccessor && sourceAccessor && packageAccessor.type === sourceAccessor.type) { combinedSource.accessors[accessorId] = deepExtend({}, sourceAccessor, packageAccessor) @@ -261,7 +167,7 @@ function calculateCombinedSource( return combinedSource } function calculateCombinedTargets( - expectedPackage: PackageManagerExpectedPackageBase, + expectedPackage: ReadonlyDeep, deviceId: PeripheralDeviceId, packageContainers: Record ): PackageContainerOnPackage[] { diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts index 5791b7ca5b..46328b5ce8 100644 --- a/meteor/server/publications/packageManager/expectedPackages/publication.ts +++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts @@ -24,7 +24,7 @@ import { PackageManagerExpectedPackage } from '@sofie-automation/shared-lib/dist import { ExpectedPackagesContentObserver } from './contentObserver' import { createReactiveContentCache, ExpectedPackagesContentCache } from './contentCache' import { buildMappingsToDeviceIdMap } from './util' -import { updateCollectionForExpectedPackageIds, updateCollectionForPieceInstanceIds } from './generate' +import { updateCollectionForExpectedPackageIds } from './generate' import { PeripheralDevicePubSub, PeripheralDevicePubSubCollectionsNames, @@ -162,16 +162,13 @@ async function manipulateExpectedPackagesPublicationData( } let regenerateExpectedPackageIds: Set - let regeneratePieceInstanceIds: Set if (invalidateAllItems) { - // force every piece to be regenerated + // force every package to be regenerated collection.remove(null) regenerateExpectedPackageIds = new Set(state.contentCache.ExpectedPackages.find({}).map((p) => p._id)) - regeneratePieceInstanceIds = new Set(state.contentCache.PieceInstances.find({}).map((p) => p._id)) } else { // only regenerate the reported changes regenerateExpectedPackageIds = new Set(updateProps.invalidateExpectedPackageIds) - regeneratePieceInstanceIds = new Set(updateProps.invalidatePieceInstanceIds) } await updateCollectionForExpectedPackageIds( @@ -183,15 +180,53 @@ async function manipulateExpectedPackagesPublicationData( args.filterPlayoutDeviceIds, regenerateExpectedPackageIds ) - await updateCollectionForPieceInstanceIds( - state.contentCache, - state.studio, - state.layerNameToDeviceIds, - state.packageContainers, - collection, - args.filterPlayoutDeviceIds, - regeneratePieceInstanceIds - ) + + // Ensure the priorities are correct for the packages + // We can 
diff --git a/meteor/server/publications/packageManager/expectedPackages/publication.ts b/meteor/server/publications/packageManager/expectedPackages/publication.ts
index 5791b7ca5b..46328b5ce8 100644
--- a/meteor/server/publications/packageManager/expectedPackages/publication.ts
+++ b/meteor/server/publications/packageManager/expectedPackages/publication.ts
@@ -24,7 +24,7 @@ import { PackageManagerExpectedPackage } from '@sofie-automation/shared-lib/dist
 import { ExpectedPackagesContentObserver } from './contentObserver'
 import { createReactiveContentCache, ExpectedPackagesContentCache } from './contentCache'
 import { buildMappingsToDeviceIdMap } from './util'
-import { updateCollectionForExpectedPackageIds, updateCollectionForPieceInstanceIds } from './generate'
+import { updateCollectionForExpectedPackageIds } from './generate'
 import {
 	PeripheralDevicePubSub,
 	PeripheralDevicePubSubCollectionsNames,
@@ -162,16 +162,13 @@ async function manipulateExpectedPackagesPublicationData(
 	}
 
 	let regenerateExpectedPackageIds: Set<ExpectedPackageId>
-	let regeneratePieceInstanceIds: Set<PieceInstanceId>
 	if (invalidateAllItems) {
-		// force every piece to be regenerated
+		// force every package to be regenerated
 		collection.remove(null)
 		regenerateExpectedPackageIds = new Set(state.contentCache.ExpectedPackages.find({}).map((p) => p._id))
-		regeneratePieceInstanceIds = new Set(state.contentCache.PieceInstances.find({}).map((p) => p._id))
 	} else {
 		// only regenerate the reported changes
 		regenerateExpectedPackageIds = new Set(updateProps.invalidateExpectedPackageIds)
-		regeneratePieceInstanceIds = new Set(updateProps.invalidatePieceInstanceIds)
 	}
 
 	await updateCollectionForExpectedPackageIds(
@@ -183,15 +180,53 @@ async function manipulateExpectedPackagesPublicationData(
 		args.filterPlayoutDeviceIds,
 		regenerateExpectedPackageIds
 	)
-	await updateCollectionForPieceInstanceIds(
-		state.contentCache,
-		state.studio,
-		state.layerNameToDeviceIds,
-		state.packageContainers,
-		collection,
-		args.filterPlayoutDeviceIds,
-		regeneratePieceInstanceIds
-	)
+
+	// Ensure the priorities are correct for the packages
+	// We can do this as a post-step, as it means we can generate the packages solely based on the content
+	// If one gets regenerated, its priority will be reset to OTHER. But as it has already changed, this fixup is 'free'
+	// For those not regenerated, we can set the priority to the correct value if it has changed, without any deeper checks
+	updatePackagePriorities(state.contentCache, collection)
+}
+
+const PACKAGE_PRIORITY_PLAYOUT_CURRENT = 0
+const PACKAGE_PRIORITY_PLAYOUT_NEXT = 1
+const PACKAGE_PRIORITY_OTHER = 9
+
+function updatePackagePriorities(
+	contentCache: ReadonlyDeep<ExpectedPackagesContentCache>,
+	collection: CustomPublishCollection<PackageManagerExpectedPackage>
+) {
+	const packagePriorities = new Map<ExpectedPackageId, number>()
+
+	// Compile the map of the expected priority of each package
+	const knownPieceInstances = contentCache.PieceInstances.find({})
+	const playlist = contentCache.RundownPlaylists.findOne({})
+	const currentPartInstanceId = playlist?.currentPartInfo?.partInstanceId
+	for (const pieceInstance of knownPieceInstances) {
+		const packageIds = pieceInstance.neededExpectedPackageIds
+		if (!packageIds) continue
+
+		const packagePriority =
+			pieceInstance.partInstanceId === currentPartInstanceId
+				? PACKAGE_PRIORITY_PLAYOUT_CURRENT
+				: PACKAGE_PRIORITY_PLAYOUT_NEXT
+
+		for (const packageId of packageIds) {
+			const existingPriority = packagePriorities.get(packageId) ?? PACKAGE_PRIORITY_OTHER
+			packagePriorities.set(packageId, Math.min(existingPriority, packagePriority))
+		}
+	}
+
+	// Iterate through and update each package
+	collection.updateAll((pkg) => {
+		const expectedPriority = packagePriorities.get(pkg.expectedPackage._id) ?? PACKAGE_PRIORITY_OTHER
+		if (pkg.priority === expectedPriority) return false
+
+		return {
+			...pkg,
+			priority: expectedPriority,
+		}
+	})
 }
 
 meteorCustomPublish(
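Because the fixup merges with `Math.min`, a package referenced by both the on-air part and an upcoming one ends up with the on-air priority. A small worked example with illustrative ids (same rule as `updatePackagePriorities` above):

```ts
// Worked sketch of the priority fixup: illustrative ids, same Math.min rule as
// updatePackagePriorities (0 = current part, 1 = next/other part, 9 = other).
const priorities = new Map<string, number>()
const refs: Array<{ packageId: string; inCurrentPart: boolean }> = [
	{ packageId: 'pkgA', inCurrentPart: false }, // referenced by an upcoming part -> 1
	{ packageId: 'pkgA', inCurrentPart: true }, // ...and by the on-air part -> min(1, 0) = 0
	{ packageId: 'pkgB', inCurrentPart: false }, // only an upcoming part -> 1
]
for (const ref of refs) {
	const prio = ref.inCurrentPart ? 0 : 1
	priorities.set(ref.packageId, Math.min(priorities.get(ref.packageId) ?? 9, prio))
}
console.log(priorities) // Map { 'pkgA' => 0, 'pkgB' => 1 }
// Anything not referenced by any PieceInstance keeps the OTHER priority (9).
```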
diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
index 1fd4f25426..f6a8069a8e 100644
--- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
+++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts
@@ -37,6 +37,7 @@ import { MediaObjects } from '../../../collections'
 import { PieceDependencies } from '../common'
 import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants'
 import { PieceContentStatusMessageFactory } from '../messageFactory'
+import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 
 const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects)
 
@@ -450,9 +451,17 @@ describe('lib/mediaObjects', () => {
 			timelineObjectsString: EmptyPieceTimelineObjectsBlob,
 		})
 
+		const mockOwnerId = protectString<RundownId>('rundown0')
+
 		const messageFactory = new PieceContentStatusMessageFactory(undefined)
 
-		const status1 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece1, sourcelayer1)
+		const status1 = await checkPieceContentStatusAndDependencies(
+			mockStudio,
+			mockOwnerId,
+			messageFactory,
+			piece1,
+			sourcelayer1
+		)
 		expect(status1[0].status).toEqual(PieceStatusCode.OK)
 		expect(status1[0].messages).toHaveLength(0)
 		expect(status1[1]).toMatchObject(
@@ -463,7 +472,13 @@ describe('lib/mediaObjects', () => {
 			})
 		)
 
-		const status2 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece2, sourcelayer1)
+		const status2 = await checkPieceContentStatusAndDependencies(
+			mockStudio,
+			mockOwnerId,
+			messageFactory,
+			piece2,
+			sourcelayer1
+		)
 		expect(status2[0].status).toEqual(PieceStatusCode.SOURCE_BROKEN)
 		expect(status2[0].messages).toHaveLength(1)
 		expect(status2[0].messages[0]).toMatchObject({
@@ -477,7 +492,13 @@ describe('lib/mediaObjects', () => {
 			})
 		)
 
-		const status3 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece3, sourcelayer1)
+		const status3 = await checkPieceContentStatusAndDependencies(
+			mockStudio,
+			mockOwnerId,
+			messageFactory,
+			piece3,
+			sourcelayer1
+		)
 		expect(status3[0].status).toEqual(PieceStatusCode.SOURCE_MISSING)
 		expect(status3[0].messages).toHaveLength(1)
 		expect(status3[0].messages[0]).toMatchObject({
diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts
index 3c737d3329..a050f1f5af 100644
--- a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts
+++ b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts
@@ -48,6 +48,7 @@ export async function regenerateForBucketAdLibIds(
 			if (sourceLayer) {
 				const [status, itemDependencies] = await checkPieceContentStatusAndDependencies(
 					uiStudio,
+					actionDoc.bucketId,
 					messageFactories.get(actionDoc.showStyleBaseId),
 					actionDoc,
 					sourceLayer
@@ -120,6 +121,7 @@ export async function regenerateForBucketActionIds(
 
 				const [status, itemDependencies] = await checkPieceContentStatusAndDependencies(
 					uiStudio,
+					actionDoc.bucketId,
 					messageFactories.get(actionDoc.showStyleBaseId),
 					fakedPiece,
 					sourceLayer
diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts
index 5584e24961..5aa69a23db 100644
--- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts
+++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts
@@ -11,7 +11,14 @@ import {
 	VTContent,
 } from '@sofie-automation/blueprints-integration'
 import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { ExpectedPackageId, PeripheralDeviceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	BucketId,
+	ExpectedPackageId,
+	PeripheralDeviceId,
+	PieceInstanceId,
+	RundownId,
+	StudioId,
+} from '@sofie-automation/corelib/dist/dataModel/Ids'
 import {
 	getPackageContainerPackageId,
 	PackageContainerPackageStatusDB,
@@ -220,6 +227,7 @@ export interface PieceContentStatusStudio
 
 export async function checkPieceContentStatusAndDependencies(
 	studio: PieceContentStatusStudio,
+	packageOwnerId: RundownId | BucketId | StudioId,
 	messageFactory: PieceContentStatusMessageFactory | undefined,
 	piece: PieceContentStatusPiece,
 	sourceLayer: ISourceLayer
@@ -290,6 +298,7 @@ export async function checkPieceContentStatusAndDependencies(
 			piece,
 			sourceLayer,
 			studio,
+			packageOwnerId,
 			getPackageInfos,
 			getPackageContainerPackageStatus,
 			messageFactory || DEFAULT_MESSAGE_FACTORY
@@ -589,6 +598,7 @@ async function checkPieceContentExpectedPackageStatus(
 	piece: PieceContentStatusPiece,
 	sourceLayer: ISourceLayer,
 	studio: PieceContentStatusStudio,
+	packageOwnerId: RundownId | BucketId | StudioId,
 	getPackageInfos: (packageId: ExpectedPackageId) => Promise<PackageInfoDB[]>,
 	getPackageContainerPackageStatus: (
 		packageContainerId: string,
@@ -657,20 +667,8 @@ async function checkPieceContentExpectedPackageStatus(
 
 				checkedPackageContainers.add(matchedPackageContainer[0])
 
-				const expectedPackageIds = [getExpectedPackageId(piece._id, expectedPackage._id)]
-				if (piece.pieceInstanceId) {
-					// If this is a PieceInstance, try looking up the PieceInstance first
-					expectedPackageIds.unshift(getExpectedPackageId(piece.pieceInstanceId, expectedPackage._id))
-
-					if (piece.previousPieceInstanceId) {
-						// Also try the previous PieceInstance, when this is an infinite continuation in case package-manager needs to catchup
-						expectedPackageIds.unshift(
-							getExpectedPackageId(piece.previousPieceInstanceId, expectedPackage._id)
-						)
-					}
-				}
-
 				const fileName = getExpectedPackageFileName(expectedPackage) ?? ''
+				const containerLabel = matchedPackageContainer[1].container.label
 
 				// Check if any of the sources exist and are valid
 				// Future: This might be better to do by passing packageManager an 'forcedError' property in the publication, but this direct check is simpler and enough for now
@@ -690,55 +688,51 @@ async function checkPieceContentExpectedPackageStatus(
 					continue
 				}
 
-				let warningMessage: ContentMessageLight | null = null
-				let matchedExpectedPackageId: ExpectedPackageId | null = null
-				for (const expectedPackageId of expectedPackageIds) {
-					const packageOnPackageContainer = await getPackageContainerPackageStatus(
-						matchedPackageContainer[0],
-						expectedPackageId
-					)
-					if (!packageOnPackageContainer) continue
-
-					matchedExpectedPackageId = expectedPackageId
-
-					if (!thumbnailUrl) {
-						const sideEffect = getSideEffect(expectedPackage, studio)
+				const candidatePackageId = getExpectedPackageId(packageOwnerId, expectedPackage)
+				const packageOnPackageContainer = await getPackageContainerPackageStatus(
+					matchedPackageContainer[0],
+					candidatePackageId
+				)
+				if (!packageOnPackageContainer) {
+					// If no package matched, we must have a warning
 
-						thumbnailUrl = await getAssetUrlFromPackageContainerStatus(
-							studio.packageContainers,
-							getPackageContainerPackageStatus,
-							expectedPackageId,
-							sideEffect.thumbnailContainerId,
-							sideEffect.thumbnailPackageSettings?.path
-						)
-					}
+					pushOrMergeMessage({
+						...getPackageSourceMissingWarning(),
+						fileName: fileName,
+						packageContainers: [containerLabel],
+					})
 
-					if (!previewUrl) {
-						const sideEffect = getSideEffect(expectedPackage, studio)
+					continue
+				}
 
-						previewUrl = await getAssetUrlFromPackageContainerStatus(
-							studio.packageContainers,
-							getPackageContainerPackageStatus,
-							expectedPackageId,
-							sideEffect.previewContainerId,
-							sideEffect.previewPackageSettings?.path
-						)
-					}
+				if (!thumbnailUrl) {
+					const sideEffect = getSideEffect(expectedPackage, studio)
 
-					warningMessage = getPackageWarningMessage(packageOnPackageContainer.status)
+					thumbnailUrl = await getAssetUrlFromPackageContainerStatus(
+						studio.packageContainers,
+						getPackageContainerPackageStatus,
+						candidatePackageId,
+						sideEffect.thumbnailContainerId,
+						sideEffect.thumbnailPackageSettings?.path
+					)
+				}
 
-					progress = getPackageProgress(packageOnPackageContainer.status) ?? undefined
+				if (!previewUrl) {
+					const sideEffect = getSideEffect(expectedPackage, studio)
 
-					// Found a packageOnPackageContainer
-					break
+					previewUrl = await getAssetUrlFromPackageContainerStatus(
+						studio.packageContainers,
+						getPackageContainerPackageStatus,
+						candidatePackageId,
+						sideEffect.previewContainerId,
+						sideEffect.previewPackageSettings?.path
+					)
 				}
 
-				const containerLabel = matchedPackageContainer[1].container.label
-
-				if (!matchedExpectedPackageId || warningMessage) {
-					// If no package matched, we must have a warning
-					warningMessage = warningMessage ?? getPackageSourceMissingWarning()
+				progress = getPackageProgress(packageOnPackageContainer.status) ?? undefined
 
+				const warningMessage = getPackageWarningMessage(packageOnPackageContainer.status)
+				if (warningMessage) {
 					pushOrMergeMessage({
 						...warningMessage,
 						fileName: fileName,
@@ -753,7 +747,7 @@ async function checkPieceContentExpectedPackageStatus(
 						containerLabel,
 					}
 					// Fetch scan-info about the package:
-					const dbPackageInfos = await getPackageInfos(matchedExpectedPackageId)
+					const dbPackageInfos = await getPackageInfos(candidatePackageId)
 					for (const packageInfo of dbPackageInfos) {
 						if (packageInfo.type === PackageInfo.Type.SCAN) {
 							packageInfos[expectedPackage._id].scan = packageInfo.payload
diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts
index 2aae8333f6..8b01eb8cc1 100644
--- a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts
+++ b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts
@@ -46,6 +46,7 @@ async function regenerateGenericPiece(
 	if (part && segment && sourceLayer) {
 		const [status, dependencies] = await checkPieceContentStatusAndDependencies(
 			uiStudio,
+			part.rundownId,
 			messageFactory,
 			pieceDoc,
 			sourceLayer
@@ -182,6 +183,7 @@ export async function regenerateForPieceInstanceIds(
 			if (partInstance && segment && sourceLayer) {
 				const [status, dependencies] = await checkPieceContentStatusAndDependencies(
 					uiStudio,
+					pieceDoc.rundownId,
 					messageFactories.get(pieceDoc.rundownId),
 					{
 						...pieceDoc.piece,
@@ -381,6 +383,7 @@ export async function regenerateForBaselineAdLibPieceIds(
 			if (sourceLayer) {
 				const [status, dependencies] = await checkPieceContentStatusAndDependencies(
 					uiStudio,
+					pieceDoc.rundownId,
 					messageFactories.get(pieceDoc.rundownId),
 					pieceDoc,
 					sourceLayer
@@ -461,6 +464,7 @@ export async function regenerateForBaselineAdLibActionIds(
 			if (sourceLayer) {
 				const [status, dependencies] = await checkPieceContentStatusAndDependencies(
 					uiStudio,
+					actionDoc.rundownId,
 					messageFactories.get(actionDoc.rundownId),
 					fakedPiece,
 					sourceLayer
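All of these call sites now pass the package's owner (a rundown, bucket, or studio id), and `checkPieceContentExpectedPackageStatus` derives a single `candidatePackageId` from owner plus package instead of probing piece and pieceInstance variants. A hedged sketch of that owner-scoped keying (the real `getExpectedPackageId` in corelib may derive the id differently; this only illustrates the `(ownerId, package)` pairing):

```ts
// Illustrative only: owner-scoped package ids. The point is that one
// deterministic id follows from (ownerId, blueprintPackageId), regardless of
// which piece or pieceInstance references the package.
import { createHash } from 'crypto'

function sketchExpectedPackageId(ownerId: string, blueprintPackageId: string): string {
	const hash = createHash('sha1').update(`${ownerId}_${blueprintPackageId}`).digest('hex')
	return `${ownerId}_${hash.slice(0, 16)}`
}

// Same id for every reference to 'clip-1' owned by 'rundown0':
console.log(sketchExpectedPackageId('rundown0', 'clip-1'))
```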
diff --git a/meteor/server/worker/__tests__/jobQueue.test.ts b/meteor/server/worker/__tests__/jobQueue.test.ts
new file mode 100644
index 0000000000..93db5024fd
--- /dev/null
+++ b/meteor/server/worker/__tests__/jobQueue.test.ts
@@ -0,0 +1,688 @@
+import '../../../__mocks__/_extendJest'
+import { waitTime } from '../../../__mocks__/helpers/jest'
+import { WorkerJobQueueManager } from '../jobQueue'
+
+// Mock Meteor.defer to run synchronously for testing
+jest.mock('meteor/meteor', () => ({
+	Meteor: {
+		defer: (fn: () => void) => {
+			// Run deferred functions immediately in tests
+			setTimeout(fn, 0)
+		},
+	},
+}))
+
+// Mock the logging module
+jest.mock('../../logging')
+
+// Mock getCurrentTime
+const mockCurrentTime = jest.fn(() => Date.now())
+jest.mock('../../lib/lib', () => ({
+	getCurrentTime: () => mockCurrentTime(),
+}))
+
+describe('WorkerJobQueueManager', () => {
+	let manager: WorkerJobQueueManager
+
+	beforeEach(() => {
+		manager = new WorkerJobQueueManager()
+		mockCurrentTime.mockReturnValue(Date.now())
+	})
+
+	afterEach(() => {
+		jest.clearAllMocks()
+	})
+
+	describe('queueJobWithoutResult', () => {
+		it('should queue a job in the high priority queue by default', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, undefined)
+
+			// Verify job is retrievable
+			const job = await manager.getNextJob(queueName)
+			expect(job).not.toBeNull()
+			expect(job?.name).toBe(jobName)
+			expect(job?.data).toEqual(jobData)
+		})
+
+		it('should queue a job in the low priority queue when lowPriority is true', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { lowPriority: true })
+
+			// Verify job is retrievable
+			const job = await manager.getNextJob(queueName)
+			expect(job).not.toBeNull()
+			expect(job?.name).toBe(jobName)
+		})
+
+		it('should prioritize high priority jobs over low priority jobs', async () => {
+			const queueName = 'testQueue'
+
+			// Queue low priority job first
+			await manager.queueJobWithoutResult(queueName, 'lowPriorityJob', { priority: 'low' }, { lowPriority: true })
+
+			// Queue high priority job second
+			await manager.queueJobWithoutResult(queueName, 'highPriorityJob', { priority: 'high' }, undefined)
+
+			// First job retrieved should be high priority
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe('highPriorityJob')
+
+			// Second job retrieved should be low priority
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe('lowPriorityJob')
+		})
+	})
+
+	describe('queueJobAndWrapResult', () => {
+		it('should return a WorkerJob with complete and getTimings promises', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+			const now = Date.now()
+
+			const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now)
+
+			expect(workerJob).toHaveProperty('complete')
+			expect(workerJob).toHaveProperty('getTimings')
+			expect(workerJob.complete).toBeInstanceOf(Promise)
+			expect(workerJob.getTimings).toBeInstanceOf(Promise)
+		})
+
+		it('should resolve complete promise with result when job finishes successfully', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+			const now = Date.now()
+			const expectedResult = { success: true }
+
+			const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now)
+
+			// Get the job from queue
+			const job = await manager.getNextJob(queueName)
+			expect(job).not.toBeNull()
+
+			// Simulate job completion
+			const startedTime = now + 100
+			const finishedTime = now + 200
+			await manager.jobFinished(job!.id, startedTime, finishedTime, null, expectedResult)
+
+			// Wait for the deferred callback
+			await waitTime(10)
+
+			// Verify result
+			const result = await workerJob.complete
+			expect(result).toEqual(expectedResult)
+		})
+
+		it('should reject complete promise when job finishes with error', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+			const now = Date.now()
+
+			const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, now)
+
+			// Add catch handler to avoid unhandled rejection
+			workerJob.complete.catch(() => {
+				// Expected rejection
+			})
+
+			// Get the job from queue
+			const job = await manager.getNextJob(queueName)
+			expect(job).not.toBeNull()
+
+			// Simulate job failure
+			const startedTime = now + 100
+			const finishedTime = now + 200
+			await manager.jobFinished(job!.id, startedTime, finishedTime, 'Job failed', null)
+
+			// Wait for the deferred callback
+			await waitTime(10)
+
+			// Verify error - the error message is wrapped in an Error object
+			await expect(workerJob.complete).rejects.toBeInstanceOf(Error)
+		})
+
+		it('should resolve getTimings promise with correct timing information', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+			const queueTime = 1000
+			const startedTime = 1100
+			const finishedTime = 1200
+			const completedTime = 1250
+
+			mockCurrentTime.mockReturnValue(completedTime)
+
+			const workerJob = manager.queueJobAndWrapResult(queueName, jobName, jobData, queueTime)
+
+			// Get the job from queue
+			const job = await manager.getNextJob(queueName)
+			expect(job).not.toBeNull()
+
+			// Simulate job completion
+			await manager.jobFinished(job!.id, startedTime, finishedTime, null, { result: 'ok' })
+
+			// Wait for the deferred callback
+			await waitTime(10)
+
+			// Verify timings
+			const timings = await workerJob.getTimings
+			expect(timings.queueTime).toBe(queueTime)
+			expect(timings.startedTime).toBe(startedTime)
+			expect(timings.finishedTime).toBe(finishedTime)
+			expect(timings.completedTime).toBe(completedTime)
+		})
+	})
+
+	describe('getNextJob', () => {
+		it('should return null when no jobs are queued', async () => {
+			const job = await manager.getNextJob('emptyQueue')
+			expect(job).toBeNull()
+		})
+
+		it('should return jobs in FIFO order within same priority', async () => {
+			const queueName = 'testQueue'
+
+			await manager.queueJobWithoutResult(queueName, 'job1', { order: 1 }, undefined)
+			await manager.queueJobWithoutResult(queueName, 'job2', { order: 2 }, undefined)
+			await manager.queueJobWithoutResult(queueName, 'job3', { order: 3 }, undefined)
+
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe('job1')
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe('job2')
+
+			const thirdJob = await manager.getNextJob(queueName)
+			expect(thirdJob?.name).toBe('job3')
+
+			const noJob = await manager.getNextJob(queueName)
+			expect(noJob).toBeNull()
+		})
+	})
+
+	describe('waitForNextJob', () => {
+		it('should resolve immediately if jobs are already queued', async () => {
+			const queueName = 'testQueue'
+
+			await manager.queueJobWithoutResult(queueName, 'existingJob', {}, undefined)
+
+			// Should resolve without waiting
+			await expect(manager.waitForNextJob(queueName)).resolves.toBeUndefined()
+		})
+
+		it('should wait for a job to be queued', async () => {
+			const queueName = 'testQueue'
+
+			// Start waiting for a job
+			const waitPromise = manager.waitForNextJob(queueName)
+
+			// Queue a job after a short delay
+			setTimeout(async () => {
+				await manager.queueJobWithoutResult(queueName, 'newJob', {}, undefined)
+			}, 10)
+
+			// Wait should resolve once job is queued
+			await expect(waitPromise).resolves.toBeUndefined()
+		})
+
+		it('should reject old worker when new worker starts waiting', async () => {
+			const queueName = 'testQueue'
+
+			// First worker starts waiting
+			const firstWaitPromise = manager.waitForNextJob(queueName)
+
+			// Add catch handler to prevent unhandled rejection warning
+			firstWaitPromise.catch(() => {
+				// Expected rejection
+			})
+
+			// Second worker starts waiting, should reject first
+			const secondWaitPromise = manager.waitForNextJob(queueName)
+
+			// Wait for deferred rejection
+			await waitTime(10)
+
+			// First worker should be rejected
+			await expect(firstWaitPromise).rejects.toThrow('new workerThread, replacing the old')
+
+			// Queue a job for second worker
+			await manager.queueJobWithoutResult(queueName, 'job', {}, undefined)
+
+			// Wait for deferred notification
+			await waitTime(10)
+
+			// Second worker should resolve
+			await expect(secondWaitPromise).resolves.toBeUndefined()
+		})
+	})
+
+	describe('interruptJobStream', () => {
+		it('should resolve waiting worker', async () => {
+			const queueName = 'testQueue'
+
+			// Start waiting for a job
+			const waitPromise = manager.waitForNextJob(queueName)
+
+			// Interrupt the queue
+			await manager.interruptJobStream(queueName)
+
+			// Wait for deferred resolution
+			await waitTime(10)
+
+			// Wait should resolve
+			await expect(waitPromise).resolves.toBeUndefined()
+		})
+
+		it('should push null job if no worker is waiting', async () => {
+			const queueName = 'testQueue'
+
+			// Interrupt without any worker waiting
+			await manager.interruptJobStream(queueName)
+
+			// Next worker should get null immediately (handled in getNextJob)
+			// But waitForNextJob should return immediately as there's a null job in queue
+			await expect(manager.waitForNextJob(queueName)).resolves.toBeUndefined()
+		})
+	})
+
+	describe('rejectAllRunning', () => {
+		it('should reject all running jobs with error', async () => {
+			const queueName = 'testQueue'
+
+			// Queue multiple jobs
+			const job1 = manager.queueJobAndWrapResult(queueName, 'job1', {}, Date.now())
+			const job2 = manager.queueJobAndWrapResult(queueName, 'job2', {}, Date.now())
+
+			// Get jobs from queue (marks them as running)
+			await manager.getNextJob(queueName)
+			await manager.getNextJob(queueName)
+
+			// Reject all running
+			manager.rejectAllRunning()
+
+			// Both jobs should be rejected
+			await expect(job1.complete).rejects.toThrow('Thread closed')
+			await expect(job2.complete).rejects.toThrow('Thread closed')
+		})
+	})
+
+	describe('debounce', () => {
+		it('should skip queueing duplicate job when debounce is enabled', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'debounceJob'
+			const jobData = { foo: 'bar' }
+			const startTime = Date.now()
+
+			mockCurrentTime.mockReturnValue(startTime)
+
+			// Queue first job with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Queue identical job with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Advance time past debounce
+			mockCurrentTime.mockReturnValue(startTime + 1001)
+
+			// Only one job should be in the queue
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe(jobName)
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob).toBeNull()
+		})
+
+		it('should allow queueing different job names even with debounce', async () => {
+			const queueName = 'testQueue'
+			const jobData = { foo: 'bar' }
+			const startTime = Date.now()
+
+			mockCurrentTime.mockReturnValue(startTime)
+
+			await manager.queueJobWithoutResult(queueName, 'job1', jobData, { debounce: 1000 })
+			await manager.queueJobWithoutResult(queueName, 'job2', jobData, { debounce: 1000 })
+
+			// Advance time past debounce
+			mockCurrentTime.mockReturnValue(startTime + 1001)
+
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe('job1')
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe('job2')
+		})
+
+		it('should allow queueing same job name with different data even with debounce', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'debounceJob'
+			const startTime = Date.now()
+
+			mockCurrentTime.mockReturnValue(startTime)
+
+			await manager.queueJobWithoutResult(queueName, jobName, { value: 1 }, { debounce: 1000 })
+			await manager.queueJobWithoutResult(queueName, jobName, { value: 2 }, { debounce: 1000 })
+
+			// Advance time past debounce
+			mockCurrentTime.mockReturnValue(startTime + 1001)
+
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe(jobName)
+			expect(firstJob?.data).toEqual({ value: 1 })
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe(jobName)
+			expect(secondJob?.data).toEqual({ value: 2 })
+		})
+
+		it('should queue job without debounce flag even if identical job exists', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'testJob'
+			const jobData = { foo: 'bar' }
+			const startTime = Date.now()
+
+			mockCurrentTime.mockReturnValue(startTime)
+
+			// Queue with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Queue without debounce - should still be added (and available immediately)
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, undefined)
+
+			// The non-debounced job should be available immediately
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe(jobName)
+
+			// The debounced job is not ready yet
+			const noJobYet = await manager.getNextJob(queueName)
+			expect(noJobYet).toBeNull()
+
+			// Advance time past debounce
+			mockCurrentTime.mockReturnValue(startTime + 1001)
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe(jobName)
+		})
+
+		it('should allow re-queueing job after original is consumed', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'debounceJob'
+			const jobData = { foo: 'bar' }
+
+			// Queue first job with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Consume the job - need to wait for debounce time first
+			mockCurrentTime.mockReturnValue(Date.now() + 1001)
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe(jobName)
+
+			// Queue same job again with debounce - should work since original was consumed
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			mockCurrentTime.mockReturnValue(Date.now() + 2002)
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob?.name).toBe(jobName)
+		})
+
+		it('should debounce across priority queues - high to low', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'debounceJob'
+			const jobData = { foo: 'bar' }
+
+			// Queue in high priority with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Try to queue identical in low priority with debounce - should be debounced
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true })
+
+			// Wait for debounce time
+			mockCurrentTime.mockReturnValue(Date.now() + 1001)
+
+			// Only one job should exist (still in high priority)
+			const firstJob = await manager.getNextJob(queueName)
+			expect(firstJob?.name).toBe(jobName)
+
+			const secondJob = await manager.getNextJob(queueName)
+			expect(secondJob).toBeNull()
+		})
+
+		it('should debounce across priority queues - low to high with priority upgrade', async () => {
+			const queueName = 'testQueue'
+			const jobName = 'debounceJob'
+			const jobData = { foo: 'bar' }
+
+			// Queue in low priority with debounce
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true })
+
+			// Try to queue identical in high priority with debounce - should upgrade existing job
+			await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 })
+
+			// Wait for debounce time
+ 1001) + + // Only one job should exist (upgraded to high priority) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + }) + + it('should prioritize upgraded job over other low priority jobs', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + + // Queue a low priority job first + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000, lowPriority: true }) + + // Queue another low priority job + await manager.queueJobWithoutResult( + queueName, + 'otherLowPriorityJob', + { other: true }, + { lowPriority: true } + ) + + // Upgrade the first job to high priority + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: 1000 }) + + // Wait for debounce time + mockCurrentTime.mockReturnValue(Date.now() + 1001) + + // First job retrieved should be the upgraded one (now high priority) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // Second should be the other low priority job + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe('otherLowPriorityJob') + }) + + it('should respect debounce timing - getNextJob ignores jobs before notBefore', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Job should not be available yet (before notBefore) + const jobBefore = await manager.getNextJob(queueName) + expect(jobBefore).toBeNull() + + // Advance time past debounce + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + + // Job should now be available + const jobAfter = await manager.getNextJob(queueName) + expect(jobAfter?.name).toBe(jobName) + }) + + it('should respect debounce timing - waitForNextJob considers notBefore', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // waitForNextJob should wait (no ready jobs) - start waiting + const waitPromise = manager.waitForNextJob(queueName) + + // Advance time past debounce - this should trigger the timer and resolve the wait + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + + // Wait a bit for the timer to fire + await waitTime(debounceTime + 50) + + // The wait should have resolved + await expect(waitPromise).resolves.toBeUndefined() + + // And the job should now be available + const job = await manager.getNextJob(queueName) + expect(job?.name).toBe(jobName) + }) + + it('should allow duplicate job after debounce time expires and job is consumed', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue first job with debounce + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Advance time past debounce 
and consume the job + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe(jobName) + + // Queue same job again - should work since original was consumed + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // Advance time past second debounce + mockCurrentTime.mockReturnValue(startTime + 2 * debounceTime + 2) + const secondJob = await manager.getNextJob(queueName) + expect(secondJob?.name).toBe(jobName) + }) + + it('should extend debounce window (notBefore) on subsequent debounce calls', async () => { + const queueName = 'testQueue' + const jobName = 'debounceJob' + const jobData = { foo: 'bar' } + const debounceTime = 100 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue first job with debounce - notBefore = startTime + 100 + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // After 50ms, queue duplicate - should extend notBefore to startTime + 150 + mockCurrentTime.mockReturnValue(startTime + 50) + await manager.queueJobWithoutResult(queueName, jobName, jobData, { debounce: debounceTime }) + + // At startTime + 110, job should NOT be ready (extended to 150) + mockCurrentTime.mockReturnValue(startTime + 110) + const jobTooEarly = await manager.getNextJob(queueName) + expect(jobTooEarly).toBeNull() + + // At startTime + 151, job should be ready + mockCurrentTime.mockReturnValue(startTime + 151) + const jobReady = await manager.getNextJob(queueName) + expect(jobReady?.name).toBe(jobName) + + // Should only be one job + const noMoreJobs = await manager.getNextJob(queueName) + expect(noMoreJobs).toBeNull() + }) + + it('should process non-debounced jobs immediately even when debounced jobs are waiting', async () => { + const queueName = 'testQueue' + const debounceTime = 1000 + const startTime = Date.now() + + mockCurrentTime.mockReturnValue(startTime) + + // Queue debounced job first + await manager.queueJobWithoutResult( + queueName, + 'debouncedJob', + { debounced: true }, + { debounce: debounceTime } + ) + + // Queue non-debounced job second + await manager.queueJobWithoutResult(queueName, 'immediateJob', { immediate: true }, undefined) + + // Non-debounced job should be available immediately + const firstJob = await manager.getNextJob(queueName) + expect(firstJob?.name).toBe('immediateJob') + + // Debounced job should not be available yet + const secondJob = await manager.getNextJob(queueName) + expect(secondJob).toBeNull() + + // After debounce time, debounced job should be available + mockCurrentTime.mockReturnValue(startTime + debounceTime + 1) + const thirdJob = await manager.getNextJob(queueName) + expect(thirdJob?.name).toBe('debouncedJob') + }) + }) + + describe('multiple queues', () => { + it('should maintain separate queues for different queue names', async () => { + const queue1 = 'queue1' + const queue2 = 'queue2' + + await manager.queueJobWithoutResult(queue1, 'jobInQueue1', { queue: 1 }, undefined) + await manager.queueJobWithoutResult(queue2, 'jobInQueue2', { queue: 2 }, undefined) + + const jobFromQueue1 = await manager.getNextJob(queue1) + expect(jobFromQueue1?.name).toBe('jobInQueue1') + + const jobFromQueue2 = await manager.getNextJob(queue2) + expect(jobFromQueue2?.name).toBe('jobInQueue2') + + // Each queue should be empty now + expect(await manager.getNextJob(queue1)).toBeNull() + expect(await manager.getNextJob(queue2)).toBeNull() + }) + + 
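(Editor's aside — the tests above pin down the debounce contract: identical name+data jobs collapse into one queued entry, every duplicate extends the `notBefore` window, and a high-priority duplicate upgrades a low-priority original. A minimal usage sketch follows, assuming a freshly constructed manager; the queue name, job name and payload are invented placeholders:)

```ts
import { WorkerJobQueueManager } from './jobQueue'

async function demoDebounce(): Promise<void> {
	const manager = new WorkerJobQueueManager()

	// Identical name + data within the window collapse into a single queued job.
	// Each duplicate pushes `notBefore` another 250ms out, so the job becomes
	// ready ~250ms after the *last* call.
	await manager.queueJobWithoutResult('studio0', 'recalcPlaylist', { playlistId: 'p0' }, { debounce: 250 })
	await manager.queueJobWithoutResult('studio0', 'recalcPlaylist', { playlistId: 'p0' }, { debounce: 250 })

	// Omitting the debounce option queues a separate job that is ready immediately:
	await manager.queueJobWithoutResult('studio0', 'recalcPlaylist', { playlistId: 'p0' }, undefined)
}
```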
it('should not mix jobs between different queues', async () => { + const queue1 = 'queue1' + const queue2 = 'queue2' + + await manager.queueJobWithoutResult(queue1, 'job1', {}, undefined) + await manager.queueJobWithoutResult(queue1, 'job2', {}, undefined) + + // Queue2 should have no jobs + expect(await manager.getNextJob(queue2)).toBeNull() + + // Queue1 should have both jobs + expect(await manager.getNextJob(queue1)).not.toBeNull() + expect(await manager.getNextJob(queue1)).not.toBeNull() + }) + }) +}) diff --git a/meteor/server/worker/jobQueue.ts b/meteor/server/worker/jobQueue.ts new file mode 100644 index 0000000000..459116cf10 --- /dev/null +++ b/meteor/server/worker/jobQueue.ts @@ -0,0 +1,410 @@ +import { UserError } from '@sofie-automation/corelib/dist/error' +import { MetricsCounter } from '@sofie-automation/corelib/dist/prometheus' +import type { JobSpec } from '@sofie-automation/job-worker/dist/main' +import { Meteor } from 'meteor/meteor' +import type { JobTimings, WorkerJob } from './worker' +import type { Time } from '@sofie-automation/shared-lib/dist/lib/lib' +import type { QueueJobOptions } from '@sofie-automation/job-worker/dist/jobs' +import { getRandomString } from '@sofie-automation/corelib/dist/lib' +import { logger } from '../logging' +import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' +import { getCurrentTime } from '../lib/lib' +import _ from 'underscore' + +const metricsQueueTotalCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_queue_total', + help: 'Number of jobs put into each worker job queue', + labelNames: ['threadName'], +}) +const metricsQueueSuccessCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_success', + help: 'Number of successful jobs from each worker', + labelNames: ['threadName'], +}) +const metricsQueueErrorsCounter = new MetricsCounter({ + name: 'sofie_meteor_jobqueue_queue_errors', + help: 'Number of failed jobs from each worker', + labelNames: ['threadName'], +}) + +interface JobQueue { + // A null job is an interruption of the queue, used to ensure that a waiting worker is woken up + jobsHighPriority: Array<JobEntry | null> + jobsLowPriority: Array<JobEntry> + + /** Notify that there is a job waiting (aka worker is long-polling) */ + notifyWorker: PromiseWithResolvers<void> | null + + metricsTotal: MetricsCounter.Internal + metricsSuccess: MetricsCounter.Internal + metricsErrors: MetricsCounter.Internal +} + +type JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, result: any) => void + +interface RunningJob { + queueName: string + completionHandler: JobCompletionHandler | null +} + +interface JobEntry { + spec: JobSpec + /** The completionHandler is called when a job is completed. null implies "shoot-and-forget" */ + completionHandler: JobCompletionHandler | null + /** If set, the job should not be executed before this time (used for debouncing) */ + notBefore?: Time + /** Timer handle for waking up workers when this job becomes ready */ + debounceTimer?: NodeJS.Timeout +} + +export class WorkerJobQueueManager { + readonly #queues = new Map<string, JobQueue>() + /** Contains all jobs that are currently being executed by a Worker.
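Keyed by the id of the JobSpec; entries are removed by jobFinished(), or failed in bulk by rejectAllRunning().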
*/ + readonly #runningJobs = new Map<string, RunningJob>() + + #getOrCreateQueue(queueName: string): JobQueue { + let queue = this.#queues.get(queueName) + if (!queue) { + queue = { + jobsHighPriority: [], + jobsLowPriority: [], + notifyWorker: null, + metricsTotal: metricsQueueTotalCounter.labels(queueName), + metricsSuccess: metricsQueueSuccessCounter.labels(queueName), + metricsErrors: metricsQueueErrorsCounter.labels(queueName), + } + this.#queues.set(queueName, queue) + } + return queue + } + + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + async jobFinished(id: string, startedTime: number, finishedTime: number, err: any, result: any): Promise<void> { + const job = this.#runningJobs.get(id) + if (job) { + this.#runningJobs.delete(id) + + // Update metrics + const queue = this.#queues.get(job.queueName) + if (queue) { + if (err) { + queue.metricsErrors.inc() + } else { + queue.metricsSuccess.inc() + } + } + + if (job.completionHandler) { + const userError = err ? UserError.tryFromJSON(err) || new Error(err) : undefined + job.completionHandler(startedTime, finishedTime, userError, result) + } + } + } + /** This is called by each Worker Thread, when it is idle and wants another job */ + async waitForNextJob(queueName: string): Promise<void> { + const queue = this.#getOrCreateQueue(queueName) + const now = getCurrentTime() + + // Helper to check if a job is ready to execute + const isJobReady = (job: JobEntry | null): boolean => { + if (!job) return true // null jobs (interrupts) are always "ready" + return !job.notBefore || job.notBefore <= now + } + + // Check if there is a ready job waiting + if (queue.jobsHighPriority.some(isJobReady) || queue.jobsLowPriority.some(isJobReady)) { + return + } + // No ready job, do a long-poll + + // Already a worker waiting?
Reject it, as we replace it + if (queue.notifyWorker) { + const oldNotify = queue.notifyWorker + + Meteor.defer(() => { + try { + // Notify the worker in the background + oldNotify.reject(new Error('new workerThread, replacing the old')) + } catch (_e) { + // Ignore + } + }) + } + + // Wait to be notified about a job + queue.notifyWorker = Promise.withResolvers<void>() + return queue.notifyWorker.promise + } + /** This is called by each Worker Thread, when it thinks there is a job to execute */ + async getNextJob(queueName: string): Promise<JobSpec | null> { + const queue = this.#getOrCreateQueue(queueName) + const now = getCurrentTime() + + // Helper to check if a job is ready to execute + const isJobReady = (job: JobEntry | null): boolean => { + if (!job) return true // null jobs (interrupts) are always "ready" + return !job.notBefore || job.notBefore <= now + } + + // Prefer high priority jobs - find first ready job + const highPriorityIndex = queue.jobsHighPriority.findIndex(isJobReady) + if (highPriorityIndex !== -1) { + const job = queue.jobsHighPriority.splice(highPriorityIndex, 1)[0] + if (job) { + this.#runningJobs.set(job.spec.id, { + queueName, + completionHandler: job.completionHandler, + }) + return job.spec + } + // null job (interrupt) - return null + return null + } + + // Check low priority jobs + const lowPriorityIndex = queue.jobsLowPriority.findIndex(isJobReady) + if (lowPriorityIndex !== -1) { + const job = queue.jobsLowPriority.splice(lowPriorityIndex, 1)[0] + this.#runningJobs.set(job.spec.id, { + queueName, + completionHandler: job.completionHandler, + }) + return job.spec + } + + // No ready job + return null + } + /** This is called when something restarts, to ensure the `queue.notifyWorker` doesn't get stuck */ + async interruptJobStream(queueName: string): Promise<void> { + // Check if there is a worker waiting: + const queue = this.#getOrCreateQueue(queueName) + if (queue.notifyWorker) { + const oldNotify = queue.notifyWorker + queue.notifyWorker = null + + Meteor.defer(() => { + try { + // Notify the worker in the background + oldNotify.resolve() + } catch (_e) { + // Ignore + } + }) + } else { + // There should be a worker waiting; its `getNextJob` might not have reached us yet + // So we push a `null` job at the start so that it interrupts immediately + queue.jobsHighPriority.unshift(null) + } + } + + async queueJobWithoutResult( + queueName: string, + jobName: string, + jobData: unknown, + options: QueueJobOptions | undefined + ): Promise<void> { + this.#queueJobInner( + queueName, + { + spec: { + id: getRandomString(), + name: jobName, + data: jobData, + }, + completionHandler: null, + }, + options + ) + } + + queueJobAndWrapResult( + queueName: string, + jobName: string, + jobData: unknown, + now: Time, + options?: QueueJobOptions + ): WorkerJob { + const jobId = getRandomString() + const { result, completionHandler } = generateCompletionHandler(jobId, now) + + this.#queueJobInner( + queueName, + { + spec: { + id: jobId, + name: jobName, + data: jobData, + }, + completionHandler: completionHandler, + }, + options + ) + + return result + } + + #queueJobInner(queueName: string, jobToQueue: JobEntry, options?: QueueJobOptions): void { + const queue = this.#getOrCreateQueue(queueName) + const isLowPriority = options?.lowPriority ??
false + const debounceTime = options?.debounce + + // Debounce: check if an identical job is already queued in either priority queue + if (debounceTime) { + const matchJob = (job: JobEntry | null): job is JobEntry => + job !== null && job.spec.name === jobToQueue.spec.name && _.isEqual(job.spec.data, jobToQueue.spec.data) + + // Check high priority queue + const existingHighPriorityIndex = queue.jobsHighPriority.findIndex(matchJob) + if (existingHighPriorityIndex !== -1) { + // Job exists in high priority - just extend the notBefore time + const existingJob = queue.jobsHighPriority[existingHighPriorityIndex] as JobEntry + existingJob.notBefore = getCurrentTime() + debounceTime + + logger.debug(`Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (extended)`) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } + + // Check low priority queue + const existingLowPriorityIndex = queue.jobsLowPriority.findIndex(matchJob) + if (existingLowPriorityIndex !== -1) { + const existingJob = queue.jobsLowPriority[existingLowPriorityIndex] + if (isLowPriority) { + // Job exists in low priority, new job is also low priority - just extend notBefore + existingJob.notBefore = getCurrentTime() + debounceTime + + logger.debug(`Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (extended)`) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } else { + // Job exists in low priority, but new job is high priority - upgrade it + queue.jobsLowPriority.splice(existingLowPriorityIndex, 1) + existingJob.notBefore = getCurrentTime() + debounceTime + queue.jobsHighPriority.push(existingJob) + logger.debug( + `Debounced duplicate job "${jobToQueue.spec.name}" in queue "${queueName}" (upgraded to high priority)` + ) + this.#scheduleDebounceWakeup(queue, existingJob) + return + } + } + + // No existing job found, set notBefore on the new job + jobToQueue.notBefore = getCurrentTime() + debounceTime + } + + // Queue the job based on priority + if (isLowPriority) { + queue.jobsLowPriority.push(jobToQueue) + } else { + queue.jobsHighPriority.push(jobToQueue) + } + + queue.metricsTotal.inc() + + // If the job is debounced, schedule a wakeup for when it becomes ready; otherwise notify a waiting worker + if (jobToQueue.notBefore) { + // Schedule a wakeup for when the debounce time expires + this.#scheduleDebounceWakeup(queue, jobToQueue) + } else { + // Ensure a waiting worker is notified + this.#notifyWorker(queue) + } + } + + #scheduleDebounceWakeup(queue: JobQueue, job: JobEntry): void { + // Clear any existing timer for this job to avoid accumulating timers + if (job.debounceTimer) { + clearTimeout(job.debounceTimer) + delete job.debounceTimer + } + + if (job.notBefore) { + const delay = Math.max(0, job.notBefore - getCurrentTime()) + job.debounceTimer = setTimeout(() => { + delete job.debounceTimer + // Ensure a waiting worker is notified + this.#notifyWorker(queue) + }, delay) + } + } + + #notifyWorker(queue: JobQueue): void { + if (queue.notifyWorker) { + const notify = queue.notifyWorker + + // Worker is about to be notified, so clear the handle: + queue.notifyWorker = null + Meteor.defer(() => { + try { + // Notify the worker in the background + notify.resolve() + } catch (e) { + // Queue failed + logger.error(`Error in notifyWorker: ${stringifyError(e)}`) + } + }) + } + } + + rejectAllRunning(): void { + const now = getCurrentTime() + for (const job of this.#runningJobs.values()) { + const queue = this.#queues.get(job.queueName) + if (queue) queue.metricsErrors.inc() + + if (job.completionHandler) {
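// Fail the job so that any caller awaiting its WorkerJob promises is rejected (the thread is gone)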
job.completionHandler(now, now, new Error('Thread closed'), null) + } + } + this.#runningJobs.clear() + } +} + +function generateCompletionHandler( + jobId: string, + queueTime: Time +): { result: WorkerJob; completionHandler: JobCompletionHandler } { + // logger.debug(`Queued job #${job.id} of "${name}" to "${queue.name}"`) + + const complete = Promise.withResolvers() + const getTimings = Promise.withResolvers() + + // TODO: Worker - timeouts + + /** The handler is called upon a completion */ + const completionHandler: JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, res: any) => { + try { + if (err) { + logger.debug(`Completed job #${jobId} with error`) + complete.reject(err) + } else { + logger.debug(`Completed job #${jobId} with success`) + complete.resolve(res) + } + } catch (e) { + logger.error(`Job completion failed: ${stringifyError(e)}`) + } + + try { + getTimings.resolve({ + queueTime, + startedTime, + + finishedTime, + completedTime: getCurrentTime(), + }) + } catch (e) { + logger.error(`Job timing resolve failed: ${stringifyError(e)}`) + } + } + + return { + result: { + complete: complete.promise, + getTimings: getTimings.promise, + }, + completionHandler, + } +} diff --git a/meteor/server/worker/worker.ts b/meteor/server/worker/worker.ts index cab5db533f..6a813e0a3a 100644 --- a/meteor/server/worker/worker.ts +++ b/meteor/server/worker/worker.ts @@ -6,10 +6,8 @@ import { logger } from '../logging' import { Meteor } from 'meteor/meteor' import { FORCE_CLEAR_CACHES_JOB, IS_INSPECTOR_ENABLED } from '@sofie-automation/corelib/dist/worker/shared' import { threadedClass, Promisify, ThreadedClassManager } from 'threadedclass' -import type { JobSpec } from '@sofie-automation/job-worker/dist/main' import type { IpcJobWorker } from '@sofie-automation/job-worker/dist/ipc' import { getRandomString } from '@sofie-automation/corelib/dist/lib' -import type { Time } from '@sofie-automation/shared-lib/dist/lib/lib' import { getCurrentTime } from '../lib/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { UserActionsLogItem } from '@sofie-automation/meteor-lib/dist/collections/UserActionsLog' @@ -21,210 +19,15 @@ import { LogEntry } from 'winston' import { initializeWorkerStatus, setWorkerStatus } from './workerStatus' import { MongoQuery } from '@sofie-automation/corelib/dist/mongo' import { UserActionsLog } from '../collections' -import { MetricsCounter } from '@sofie-automation/corelib/dist/prometheus' import { isInTestWrite } from '../security/securityVerify' -import { UserError } from '@sofie-automation/corelib/dist/error' +import { QueueJobOptions } from '@sofie-automation/job-worker/dist/jobs' +import { WorkerJobQueueManager } from './jobQueue' const FREEZE_LIMIT = 1000 // how long to wait for a response to a Ping const RESTART_TIMEOUT = 30000 // how long to wait for a restart to complete before throwing an error const KILL_TIMEOUT = 30000 // how long to wait for a thread to terminate before throwing an error -interface JobEntry { - spec: JobSpec - /** The completionHandler is called when a job is completed. 
null implies "shoot-and-forget" */ - completionHandler: JobCompletionHandler | null -} - -const metricsQueueTotalCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_queue_total', - help: 'Number of jobs put into each worker job queues', - labelNames: ['threadName'], -}) -const metricsQueueSuccessCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_success', - help: 'Number of successful jobs from each worker', - labelNames: ['threadName'], -}) -const metricsQueueErrorsCounter = new MetricsCounter({ - name: 'sofie_meteor_jobqueue_queue_errors', - help: 'Number of failed jobs from each worker', - labelNames: ['threadName'], -}) - -interface JobQueue { - jobs: Array - /** Notify that there is a job waiting (aka worker is long-polling) */ - notifyWorker: PromiseWithResolvers | null - - metricsTotal: MetricsCounter.Internal - metricsSuccess: MetricsCounter.Internal - metricsErrors: MetricsCounter.Internal -} - -type JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, result: any) => void - -interface RunningJob { - queueName: string - completionHandler: JobCompletionHandler | null -} - -const queues = new Map() -/** Contains all jobs that are currently being executed by a Worker. */ -const runningJobs = new Map() - -function getOrCreateQueue(queueName: string): JobQueue { - let queue = queues.get(queueName) - if (!queue) { - queue = { - jobs: [], - notifyWorker: null, - metricsTotal: metricsQueueTotalCounter.labels(queueName), - metricsSuccess: metricsQueueSuccessCounter.labels(queueName), - metricsErrors: metricsQueueErrorsCounter.labels(queueName), - } - queues.set(queueName, queue) - } - return queue -} - -async function jobFinished( - id: string, - startedTime: number, - finishedTime: number, - err: any, - result: any -): Promise { - const job = runningJobs.get(id) - if (job) { - runningJobs.delete(id) - - // Update metrics - const queue = queues.get(job.queueName) - if (queue) { - if (err) { - queue.metricsErrors.inc() - } else { - queue.metricsSuccess.inc() - } - } - - if (job.completionHandler) { - const userError = err ? UserError.tryFromJSON(err) || new Error(err) : undefined - job.completionHandler(startedTime, finishedTime, userError, result) - } - } -} -/** This is called by each Worker Thread, when it is idle and wants another job */ -async function waitForNextJob(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - if (queue.jobs.length > 0) { - return - } - // No job ready, do a long-poll - - // Already a worker waiting? 
Reject it, as we replace it - if (queue.notifyWorker) { - const oldNotify = queue.notifyWorker - - Meteor.defer(() => { - try { - // Notify the worker in the background - oldNotify.reject(new Error('new workerThread, replacing the old')) - } catch (_e) { - // Ignore - } - }) - } - - // Wait to be notified about a job - queue.notifyWorker = Promise.withResolvers() - return queue.notifyWorker.promise -} -/** This is called by each Worker Thread, when it thinks there is a job to execute */ -async function getNextJob(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - const job = queue.jobs.shift() - if (job) { - // If there is a completion handler, register it for execution - runningJobs.set(job.spec.id, { - queueName, - completionHandler: job.completionHandler, - }) - - // Pass the job to the worker - return job.spec - } - - // No job ready - return null -} -/** This is called by each Worker Thread, when it is idle and wants another job */ -async function interruptJobStream(queueName: string): Promise { - // Check if there is a job waiting: - const queue = getOrCreateQueue(queueName) - if (queue.notifyWorker) { - const oldNotify = queue.notifyWorker - queue.notifyWorker = null - - Meteor.defer(() => { - try { - // Notify the worker in the background - oldNotify.resolve() - } catch (_e) { - // Ignore - } - }) - } else { - queue.jobs.unshift(null) - } -} -async function queueJobWithoutResult(queueName: string, jobName: string, jobData: unknown): Promise { - queueJobInner(queueName, { - spec: { - id: getRandomString(), - name: jobName, - data: jobData, - }, - completionHandler: null, - }) -} - -function queueJobInner(queueName: string, jobToQueue: JobEntry): void { - // Put the job at the end of the queue: - const queue = getOrCreateQueue(queueName) - queue.jobs.push(jobToQueue) - queue.metricsTotal.inc() - - // If there is a worker waiting to pick up a job - if (queue.notifyWorker) { - const notify = queue.notifyWorker - - // Worker is about to be notified, so clear the handle: - queue.notifyWorker = null - Meteor.defer(() => { - try { - // Notify the worker in the background - notify.resolve() - } catch (e) { - // Queue failed - logger.error(`Error in notifyWorker: ${stringifyError(e)}`) - } - }) - } -} - -function queueJobAndWrapResult(queueName: string, job: JobSpec, now: Time): WorkerJob { - const { result, completionHandler } = generateCompletionHandler(job.id, now) - - queueJobInner(queueName, { - spec: job, - completionHandler: completionHandler, - }) - - return result -} +const queueManager = new WorkerJobQueueManager() async function fastTrackTimeline(newTimeline: TimelineComplete): Promise { const studio = await fetchStudioLight(newTimeline._id) @@ -300,11 +103,11 @@ Meteor.startup(async () => { 'IpcJobWorker', [ workerId, - jobFinished, - interruptJobStream, - waitForNextJob, - getNextJob, - queueJobWithoutResult, + queueManager.jobFinished.bind(queueManager), + queueManager.interruptJobStream.bind(queueManager), + queueManager.waitForNextJob.bind(queueManager), + queueManager.getNextJob.bind(queueManager), + queueManager.queueJobWithoutResult.bind(queueManager), logLine, fastTrackTimeline, !IS_INSPECTOR_ENABLED, @@ -343,16 +146,7 @@ Meteor.startup(async () => { 'thread_closed', Meteor.bindEnvironment(() => { // Thread closed, reject all jobs - const now = getCurrentTime() - for (const job of runningJobs.values()) { - const queue = queues.get(job.queueName) - if (queue) queue.metricsErrors.inc() - - if 
(job.completionHandler) { - job.completionHandler(now, now, new Error('Thread closed'), null) - } - } - runningJobs.clear() + queueManager.rejectAllRunning() setWorkerStatus(workerId, false, 'Closed').catch((e) => { logger.error(`Failed to update worker threads status: ${stringifyError(e)}`) @@ -407,41 +201,17 @@ export async function QueueForceClearAllCaches(studioIds: StudioId[]): Promise( jobName: T, studioId: StudioId, - jobParameters: Parameters[0] + jobParameters: Parameters[0], + options?: QueueJobOptions ): Promise>> { if (isInTestWrite()) throw new Meteor.Error(404, 'Should not be reachable during startup tests') if (!studioId) throw new Meteor.Error(500, 'Missing studioId') const now = getCurrentTime() - return queueJobAndWrapResult( - getStudioQueueName(studioId), - { - id: getRandomString(), - name: jobName, - data: jobParameters, - }, - now - ) + return queueManager.queueJobAndWrapResult(getStudioQueueName(studioId), jobName, jobParameters, now, options) } /** @@ -491,60 +254,5 @@ export async function QueueIngestJob( if (!studioId) throw new Meteor.Error(500, 'Missing studioId') const now = getCurrentTime() - return queueJobAndWrapResult( - getIngestQueueName(studioId), - { - id: getRandomString(), - name: jobName, - data: jobParameters, - }, - now - ) -} - -function generateCompletionHandler( - jobId: string, - queueTime: Time -): { result: WorkerJob; completionHandler: JobCompletionHandler } { - // logger.debug(`Queued job #${job.id} of "${name}" to "${queue.name}"`) - - const complete = Promise.withResolvers() - const getTimings = Promise.withResolvers() - - // TODO: Worker - timeouts - - /** The handler is called upon a completion */ - const completionHandler: JobCompletionHandler = (startedTime: number, finishedTime: number, err: any, res: any) => { - try { - if (err) { - logger.debug(`Completed job #${jobId} with error`) - complete.reject(err) - } else { - logger.debug(`Completed job #${jobId} with success`) - complete.resolve(res) - } - } catch (e) { - logger.error(`Job completion failed: ${stringifyError(e)}`) - } - - try { - getTimings.resolve({ - queueTime, - startedTime, - - finishedTime, - completedTime: getCurrentTime(), - }) - } catch (e) { - logger.error(`Job timing resolve failed: ${stringifyError(e)}`) - } - } - - return { - result: { - complete: complete.promise, - getTimings: getTimings.promise, - }, - completionHandler, - } + return queueManager.queueJobAndWrapResult(getIngestQueueName(studioId), jobName, jobParameters, now) } diff --git a/meteor/yarn.lock b/meteor/yarn.lock index b5bebd1474..b9708443a2 100644 --- a/meteor/yarn.lock +++ b/meteor/yarn.lock @@ -1208,6 +1208,7 @@ __metadata: "@sofie-automation/corelib": "npm:1.53.0-in-development" "@sofie-automation/shared-lib": "npm:1.53.0-in-development" amqplib: "npm:^0.10.5" + chrono-node: "npm:^2.9.0" deepmerge: "npm:^4.3.1" elastic-apm-node: "npm:^4.11.0" mongodb: "npm:^6.12.0" @@ -2899,6 +2900,13 @@ __metadata: languageName: node linkType: hard +"chrono-node@npm:^2.9.0": + version: 2.9.0 + resolution: "chrono-node@npm:2.9.0" + checksum: 10/a30bbaa67f9a127e711db6e694ee4c89292d8f533dbfdc3d7cb34f479728e02e377f682e75ad84dd4b6a16016c248a5e85fb453943b96f93f5993f5ccddc6d08 + languageName: node + linkType: hard + "ci-info@npm:^3.2.0": version: 3.8.0 resolution: "ci-info@npm:3.8.0" diff --git a/packages/blueprints-integration/src/api/showStyle.ts b/packages/blueprints-integration/src/api/showStyle.ts index 307fdbe80e..43182638f3 100644 --- a/packages/blueprints-integration/src/api/showStyle.ts +++ 
b/packages/blueprints-integration/src/api/showStyle.ts @@ -140,7 +140,7 @@ export interface ShowStyleBlueprintManifest Promise<{ validationErrors: any } | void> + ) => Promise /** Generate adlib piece from ingest data */ getAdlibItem?: ( diff --git a/packages/blueprints-integration/src/context/adlibActionContext.ts b/packages/blueprints-integration/src/context/adlibActionContext.ts index 4435d76b41..afca8bcff0 100644 --- a/packages/blueprints-integration/src/context/adlibActionContext.ts +++ b/packages/blueprints-integration/src/context/adlibActionContext.ts @@ -5,6 +5,7 @@ import { IPartAndPieceActionContext } from './partsAndPieceActionContext.js' import { IExecuteTSRActionsContext } from './executeTsrActionContext.js' import { IBlueprintPart, IBlueprintPartInstance, IBlueprintPiece } from '../index.js' import { IRouteSetMethods } from './routeSetContext.js' +import { ITTimersContext } from './tTimersContext.js' /** Actions */ export interface IDataStoreMethods { @@ -26,7 +27,8 @@ export interface IActionExecutionContext IDataStoreMethods, IPartAndPieceActionContext, IExecuteTSRActionsContext, - IRouteSetMethods { + IRouteSetMethods, + ITTimersContext { /** Fetch the showstyle config for the specified part */ // getNextShowStyleConfig(): Readonly<{ [key: string]: ConfigItemValue }> diff --git a/packages/blueprints-integration/src/context/onSetAsNextContext.ts b/packages/blueprints-integration/src/context/onSetAsNextContext.ts index 9e729ce402..c32b06d8e3 100644 --- a/packages/blueprints-integration/src/context/onSetAsNextContext.ts +++ b/packages/blueprints-integration/src/context/onSetAsNextContext.ts @@ -6,18 +6,19 @@ import { IBlueprintPieceDB, IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, - IBlueprintSegment, + IBlueprintSegmentDB, IEventContext, IShowStyleUserContext, } from '../index.js' import { BlueprintQuickLookInfo } from './quickLoopInfo.js' import { ReadonlyDeep } from 'type-fest' +import type { ITTimersContext } from './tTimersContext.js' /** * Context in which 'current' is the part currently on air, and 'next' is the partInstance being set as Next * This is similar to `IPartAndPieceActionContext`, but has more limits on what is allowed to be changed. */ -export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContext { +export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContext, ITTimersContext { /** Information about the current loop, if there is one */ readonly quickLoopInfo: BlueprintQuickLookInfo | null @@ -55,7 +56,7 @@ export interface IOnSetAsNextContext extends IShowStyleUserContext, IEventContex /** Gets the Part for a Piece retrieved from findLastScriptedPieceOnLayer. This primarily allows for accessing metadata of the Part */ getPartForPreviousPiece(piece: IBlueprintPieceDB): Promise /** Gets the Segment. 
This primarily allows for accessing metadata */ - getSegment(segment: 'current' | 'next'): Promise + getSegment(segment: 'current' | 'next'): Promise /** Get a list of the upcoming Parts in the Rundown, in the order that they will be Taken * diff --git a/packages/blueprints-integration/src/context/onTakeContext.ts b/packages/blueprints-integration/src/context/onTakeContext.ts index 3918bdd7ee..50606a37ba 100644 --- a/packages/blueprints-integration/src/context/onTakeContext.ts +++ b/packages/blueprints-integration/src/context/onTakeContext.ts @@ -1,6 +1,7 @@ import { IEventContext, IShowStyleUserContext, Time } from '../index.js' import { IPartAndPieceActionContext } from './partsAndPieceActionContext.js' import { IExecuteTSRActionsContext } from './executeTsrActionContext.js' +import { ITTimersContext } from './tTimersContext.js' /** * Context in which 'current' is the partInstance we're leaving, and 'next' is the partInstance we're taking @@ -9,7 +10,8 @@ export interface IOnTakeContext extends IPartAndPieceActionContext, IShowStyleUserContext, IEventContext, - IExecuteTSRActionsContext { + IExecuteTSRActionsContext, + ITTimersContext { /** Inform core that a take out of the taken partinstance should be blocked until the specified time */ blockTakeUntil(time: Time | null): Promise /** diff --git a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts index 22af1b509f..e9773a2caa 100644 --- a/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts +++ b/packages/blueprints-integration/src/context/partsAndPieceActionContext.ts @@ -7,7 +7,7 @@ import { IBlueprintPieceDB, IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, - IBlueprintSegment, + IBlueprintSegmentDB, Time, } from '../index.js' import { BlueprintQuickLookInfo } from './quickLoopInfo.js' @@ -47,7 +47,7 @@ export interface IPartAndPieceActionContext { /** Gets the Part for a Piece retrieved from findLastScriptedPieceOnLayer. This primarily allows for accessing metadata of the Part */ getPartForPreviousPiece(piece: IBlueprintPieceDB): Promise /** Gets the Segment. 
This primarily allows for accessing metadata */ - getSegment(segment: 'current' | 'next'): Promise + getSegment(segment: 'current' | 'next'): Promise /** Get a list of the upcoming Parts in the Rundown, in the order that they will be Taken * diff --git a/packages/blueprints-integration/src/context/rundownContext.ts b/packages/blueprints-integration/src/context/rundownContext.ts index 402da1fa39..cf3a30e332 100644 --- a/packages/blueprints-integration/src/context/rundownContext.ts +++ b/packages/blueprints-integration/src/context/rundownContext.ts @@ -4,6 +4,7 @@ import type { IPackageInfoContext } from './packageInfoContext.js' import type { IShowStyleContext } from './showStyleContext.js' import type { IExecuteTSRActionsContext } from './executeTsrActionContext.js' import type { IDataStoreMethods } from './adlibActionContext.js' +import { ITTimersContext } from './tTimersContext.js' export interface IRundownContext extends IShowStyleContext { readonly rundownId: string @@ -13,7 +14,11 @@ export interface IRundownContext extends IShowStyleContext { export interface IRundownUserContext extends IUserNotesContext, IRundownContext {} -export interface IRundownActivationContext extends IRundownContext, IExecuteTSRActionsContext, IDataStoreMethods { +export interface IRundownActivationContext + extends IRundownContext, + IExecuteTSRActionsContext, + IDataStoreMethods, + ITTimersContext { /** Info about the RundownPlaylist state before the Activation / Deactivation event */ readonly previousState: IRundownActivationContextState readonly currentState: IRundownActivationContextState diff --git a/packages/blueprints-integration/src/context/tTimersContext.ts b/packages/blueprints-integration/src/context/tTimersContext.ts new file mode 100644 index 0000000000..8747f450a2 --- /dev/null +++ b/packages/blueprints-integration/src/context/tTimersContext.ts @@ -0,0 +1,119 @@ +export type IPlaylistTTimerIndex = 1 | 2 | 3 + +export interface ITTimersContext { + /** + * Get a T-timer by its index + * Note: Index is 1-based (1, 2, 3) + * @param index Number of the timer to retrieve + */ + getTimer(index: IPlaylistTTimerIndex): IPlaylistTTimer + + /** + * Clear all T-timers + */ + clearAllTimers(): void +} + +export interface IPlaylistTTimer { + readonly index: IPlaylistTTimerIndex + + /** The label of the T-timer */ + readonly label: string + + /** + * The current state of the T-timer + * Null if the T-timer is not initialized + */ + readonly state: IPlaylistTTimerState | null + + /** Set the label of the T-timer */ + setLabel(label: string): void + + /** Clear the T-timer back to an uninitialized state */ + clearTimer(): void + + /** + * Start a countdown timer + * @param duration Duration of the countdown in milliseconds + * @param options Options for the countdown + */ + startCountdown(duration: number, options?: { stopAtZero?: boolean; startPaused?: boolean }): void + + /** + * Start a timeOfDay timer, counting towards the target time + * This will throw if it is unable to parse the target time + * @param targetTime The target time, as a string (e.g. 
"14:30", "2023-12-31T23:59:59Z") or a timestamp number + */ + startTimeOfDay(targetTime: string | number, options?: { stopAtZero?: boolean }): void + + /** + * Start a free-running timer + */ + startFreeRun(options?: { startPaused?: boolean }): void + + /** + * If the current mode supports being paused, pause the timer + * Note: This is supported by the countdown and freerun modes + * @returns True if the timer was paused, false if it could not be paused + */ + pause(): boolean + + /** + * If the current mode supports being paused, resume the timer + * This is the opposite of `pause()` + * @returns True if the timer was resumed, false if it could not be resumed + */ + resume(): boolean + + /** + * If the timer can be restarted, restore it to its initial/restarted state + * Note: This is supported by the countdown and timeOfDay modes + * @returns True if the timer was restarted, false if it could not be restarted + */ + restart(): boolean +} + +export type IPlaylistTTimerState = + | IPlaylistTTimerStateCountdown + | IPlaylistTTimerStateFreeRun + | IPlaylistTTimerStateTimeOfDay + +export interface IPlaylistTTimerStateCountdown { + /** The mode of the T-timer */ + readonly mode: 'countdown' + /** The current time of the countdown, in milliseconds */ + readonly currentTime: number + /** The total duration of the countdown, in milliseconds */ + readonly duration: number + /** Whether the timer is currently paused */ + readonly paused: boolean + + /** If the countdown is set to stop at zero, or continue into negative values */ + readonly stopAtZero: boolean +} +export interface IPlaylistTTimerStateFreeRun { + /** The mode of the T-timer */ + readonly mode: 'freeRun' + /** The current time of the freerun, in milliseconds */ + readonly currentTime: number + /** Whether the timer is currently paused */ + readonly paused: boolean +} + +export interface IPlaylistTTimerStateTimeOfDay { + /** The mode of the T-timer */ + readonly mode: 'timeOfDay' + /** The current remaining time of the timer, in milliseconds */ + readonly currentTime: number + /** The target timestamp of the timer, in milliseconds */ + readonly targetTime: number + + /** + * The raw target string of the timer, as provided when setting the timer + * (e.g. 
"14:30", "2023-12-31T23:59:59Z", or a timestamp number) + */ + readonly targetRaw: string | number + + /** If the countdown is set to stop at zero, or continue into negative values */ + readonly stopAtZero: boolean +} diff --git a/packages/corelib/src/__tests__/hash.spec.ts b/packages/corelib/src/__tests__/hash.spec.ts new file mode 100644 index 0000000000..ea0ff95eee --- /dev/null +++ b/packages/corelib/src/__tests__/hash.spec.ts @@ -0,0 +1,227 @@ +import { hashObj } from '../hash.js' + +describe('hashObj', () => { + describe('primitive types', () => { + test('string values', () => { + expect(hashObj('hello')).toBe(hashObj('hello')) + expect(hashObj('hello')).not.toBe(hashObj('world')) + }) + + test('number values', () => { + expect(hashObj(123)).toBe(hashObj(123)) + expect(hashObj(123)).not.toBe(hashObj(456)) + expect(hashObj(0)).toBe(hashObj(0)) + }) + + test('boolean values', () => { + expect(hashObj(true)).toBe(hashObj(true)) + expect(hashObj(false)).toBe(hashObj(false)) + expect(hashObj(true)).not.toBe(hashObj(false)) + }) + + test('undefined should produce consistent hash', () => { + expect(hashObj(undefined)).toBe(hashObj(undefined)) + }) + + test('null should produce consistent hash', () => { + const hash1 = hashObj(null) + const hash2 = hashObj(null) + expect(hash1).toBe(hash2) + }) + + test('null and undefined should produce different hashes', () => { + expect(hashObj(null)).not.toBe(hashObj(undefined)) + }) + }) + + describe('object stability', () => { + test('same properties in different order should produce same hash', () => { + const obj1 = { a: 1, b: 2, c: 3 } + const obj2 = { c: 3, a: 1, b: 2 } + const obj3 = { b: 2, c: 3, a: 1 } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + expect(hashObj(obj1)).toBe(hashObj(obj3)) + expect(hashObj(obj2)).toBe(hashObj(obj3)) + }) + + test('different property values should produce different hashes', () => { + const obj1 = { a: 1, b: 2 } + const obj2 = { a: 1, b: 3 } + + expect(hashObj(obj1)).not.toBe(hashObj(obj2)) + }) + + test('different properties should produce different hashes', () => { + const obj1 = { a: 1, b: 2 } + const obj2 = { a: 1, c: 2 } + + expect(hashObj(obj1)).not.toBe(hashObj(obj2)) + }) + }) + + describe('nested objects', () => { + test('nested objects with same structure should produce same hash', () => { + const obj1 = { a: 1, b: { c: 2, d: 3 } } + const obj2 = { b: { d: 3, c: 2 }, a: 1 } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('deeply nested objects should be stable', () => { + const obj1 = { + level1: { + level2: { + level3: { + value: 'deep', + }, + }, + }, + } + const obj2 = { + level1: { + level2: { + level3: { + value: 'deep', + }, + }, + }, + } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with null values should work', () => { + const obj1 = { a: 1, b: null } + const obj2 = { b: null, a: 1 } + + expect(() => hashObj(obj1)).not.toThrow() + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with undefined values should work', () => { + const obj1 = { a: 1, b: undefined } + const obj2 = { b: undefined, a: 1 } + + expect(() => hashObj(obj1)).not.toThrow() + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + }) + + describe('arrays', () => { + test('arrays should produce consistent hashes', () => { + const arr1 = [1, 2, 3] + const arr2 = [1, 2, 3] + + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + + test('arrays with different order should produce different hashes', () => { + const arr1 = [1, 2, 3] + const arr2 = [3, 2, 1] + + // Arrays maintain order, so 
different order = different hash + expect(hashObj(arr1)).not.toBe(hashObj(arr2)) + }) + + test('empty arrays should produce consistent hash', () => { + expect(hashObj([])).toBe(hashObj([])) + }) + + test('nested arrays should work', () => { + const arr1 = [1, [2, 3], 4] + const arr2 = [1, [2, 3], 4] + + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + + test('arrays with null should work', () => { + const arr1 = [1, null, 3] + const arr2 = [1, null, 3] + + expect(() => hashObj(arr1)).not.toThrow() + expect(hashObj(arr1)).toBe(hashObj(arr2)) + }) + }) + + describe('edge cases', () => { + test('empty object should produce consistent hash', () => { + expect(hashObj({})).toBe(hashObj({})) + }) + + test('object with empty string key should work', () => { + const obj = { '': 'value' } + expect(() => hashObj(obj)).not.toThrow() + expect(hashObj(obj)).toBe(hashObj({ '': 'value' })) + }) + + test('object with numeric string keys should be stable', () => { + const obj1 = { '1': 'a', '2': 'b' } + const obj2 = { '2': 'b', '1': 'a' } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('objects with mixed types should work', () => { + const obj = { + string: 'value', + number: 42, + boolean: true, + null: null, + undefined: undefined, + nested: { a: 1 }, + array: [1, 2, 3], + } + + expect(() => hashObj(obj)).not.toThrow() + expect(hashObj(obj)).toBe(hashObj(obj)) + }) + }) + + describe('consistency with simple values', () => { + test('string should be consistent', () => { + const str = 'test' + const hash1 = hashObj(str) + const hash2 = hashObj(str) + expect(hash1).toBe(hash2) + }) + + test('number zero should be different from empty string', () => { + expect(hashObj(0)).not.toBe(hashObj('')) + }) + + test('false should be different from 0', () => { + expect(hashObj(false)).not.toBe(hashObj(0)) + }) + }) + + describe('undefined property equivalence', () => { + test('object with undefined property should equal empty object', () => { + const obj1 = { a: undefined } + const obj2 = {} + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('multiple undefined properties should equal empty object', () => { + const obj1 = { a: undefined, b: undefined } + const obj2 = {} + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('mixed undefined and defined properties', () => { + const obj1 = { a: 1, b: undefined, c: 2 } + const obj2 = { a: 1, c: 2 } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + + test('nested objects with undefined properties', () => { + const obj1 = { a: 1, b: { c: undefined } } + const obj2 = { a: 1, b: {} } + + expect(hashObj(obj1)).toBe(hashObj(obj2)) + }) + }) +}) diff --git a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts index 68f682ebdf..8411fb5791 100644 --- a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts +++ b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts @@ -1,6 +1,5 @@ import { ExpectedPackageStatusAPI, Time } from '@sofie-automation/blueprints-integration' -import { ExpectedPackageDBBase } from './ExpectedPackages.js' -import { ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js' +import { ExpectedPackageId, ExpectedPackageWorkStatusId, PeripheralDeviceId, StudioId } from './Ids.js' /** * ExpectedPackageWorkStatus contains statuses about Work that is being performed on expected packages @@ -10,7 +9,7 @@ import { ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js' export interface ExpectedPackageWorkStatus extends Omit { _id: 
ExpectedPackageWorkStatusId - studioId: ExpectedPackageDBBase['studioId'] + studioId: StudioId fromPackages: ExpectedPackageWorkStatusFromPackage[] /** Which PeripheralDevice this update came from */ @@ -20,5 +19,5 @@ export interface ExpectedPackageWorkStatus extends Omit { - id: ExpectedPackageDBBase['_id'] + id: ExpectedPackageId } diff --git a/packages/corelib/src/dataModel/ExpectedPackages.ts b/packages/corelib/src/dataModel/ExpectedPackages.ts index 2e91000143..e2d49c2c26 100644 --- a/packages/corelib/src/dataModel/ExpectedPackages.ts +++ b/packages/corelib/src/dataModel/ExpectedPackages.ts @@ -18,7 +18,7 @@ import { import { ReadonlyDeep } from 'type-fest' /* - Expected Packages are created from Pieces in the rundown. + Expected Packages are created from Pieces and other content in the rundown. A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc.. The blueprints generate Pieces with expectedPackages on them. These are then picked up by a Package Manager who then tries to fullfill the expectations. @@ -26,22 +26,6 @@ import { ReadonlyDeep } from 'type-fest' The Package Manager will then copy the file to the right place. */ -export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction - -export type ExpectedPackageFromRundownBaseline = - | ExpectedPackageDBFromBaselineAdLibAction - | ExpectedPackageDBFromBaselineAdLibPiece - | ExpectedPackageDBFromRundownBaselineObjects - | ExpectedPackageDBFromBaselinePiece - -export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction - -export type ExpectedPackageDB = - | ExpectedPackageFromRundown - | ExpectedPackageDBFromBucket - | ExpectedPackageFromRundownBaseline - | ExpectedPackageDBFromStudioBaselineObjects - export enum ExpectedPackageDBType { PIECE = 'piece', ADLIB_PIECE = 'adlib_piece', @@ -54,23 +38,60 @@ export enum ExpectedPackageDBType { RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects', } -export interface ExpectedPackageDBBase extends Omit { - _id: ExpectedPackageId - /** The local package id - as given by the blueprints */ - blueprintPackageId: string + +export interface ExpectedPackageDB { + _id: ExpectedPackageId // derived from rundownId and hash of `package` /** The studio of the Rundown of the Piece this package belongs to */ studioId: StudioId - /** Hash that changes whenever the content or version changes. See getContentVersionHash() */ - contentVersionHash: string - - // pieceId: ProtectedString | null - fromPieceType: ExpectedPackageDBType + /** The rundown this package belongs to, if any. Must not be set when bucketId is set */ + rundownId: RundownId | null + /** The bucket this package belongs to, if any. Must not be set when rundownId is set */ + bucketId: BucketId | null created: Time + + package: ReadonlyDeep> + + /** + * The ingest sources that generated this package. + */ + ingestSources: ExpectedPackageIngestSource[] + + playoutSources: { + /** + * Any playout PieceInstance. This can be any non-reset pieceInstance in the rundown. + * Due to the update flow, this can contain some stale data for a few seconds after a playout operation. 
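+ * An empty list here means the package is no longer referenced by playout; see isPackageReferencedByPlayout() below.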
+ */ + pieceInstanceIds: PieceInstanceId[] + } +} + +export interface ExpectedPackageIngestSourceBase { + /** The id of the package as known by the blueprints */ + blueprintPackageId: string + + /** Whether the blueprints are listening for updates to packageInfos for this package */ + listenToPackageInfoUpdates: boolean | undefined +} + +export interface ExpectedPackageIngestSourceBucketAdlibPiece extends ExpectedPackageIngestSourceBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB + /** The Bucket adlib this package belongs to */ + pieceId: BucketAdLibId + /** The `externalId` of the Bucket adlib this package belongs to */ + pieceExternalId: string +} +export interface ExpectedPackageIngestSourceBucketAdlibAction extends ExpectedPackageIngestSourceBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION + /** The Bucket adlib-action this package belongs to */ + pieceId: BucketAdLibActionId + /** The `externalId` of the Bucket adlib-action this package belongs to */ + pieceExternalId: string } -export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { + +export interface ExpectedPackageIngestSourcePiece extends ExpectedPackageIngestSourceBase { fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE /** The Piece this package belongs to */ pieceId: PieceId @@ -78,93 +99,82 @@ export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { partId: PartId /** The Segment this package belongs to */ segmentId: SegmentId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } -export interface ExpectedPackageDBFromBaselinePiece extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceAdlibAction extends ExpectedPackageIngestSourceBase { + fromPieceType: ExpectedPackageDBType.ADLIB_ACTION + /** The Piece this package belongs to */ + pieceId: AdLibActionId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId +} +export interface ExpectedPackageIngestSourceBaselinePiece extends ExpectedPackageIngestSourceBase { fromPieceType: ExpectedPackageDBType.BASELINE_PIECE /** The Piece this package belongs to */ pieceId: PieceId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } - -export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceBaselineAdlibPiece extends ExpectedPackageIngestSourceBase { fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE /** The Piece this package belongs to */ pieceId: PieceId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId -} - -export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.ADLIB_ACTION - /** The Adlib Action this package belongs to */ - pieceId: AdLibActionId - /** The Part this package belongs to */ - partId: PartId - /** The Segment this package belongs to */ - segmentId: SegmentId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } -export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceBaselineAdlibAction extends ExpectedPackageIngestSourceBase { fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION /** The Piece this package belongs to */ pieceId: RundownBaselineAdLibActionId - /** The rundown of the Piece this package belongs to */ - rundownId: 
RundownId } - -export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceBaselineObjects extends ExpectedPackageIngestSourceBase { fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId - pieceId: null } -export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase { + +export interface ExpectedPackageIngestSourceStudioBaseline extends ExpectedPackageIngestSourceBase { + // Future: Technically this is a playout source, but for now it needs to be treated as an ingest source fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS - pieceId: null } -export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB - bucketId: BucketId - /** The Bucket adlib this package belongs to */ - pieceId: BucketAdLibId - /** The `externalId` of the Bucket adlib this package belongs to */ - pieceExternalId: string -} -export interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION - bucketId: BucketId - /** The Bucket adlib-action this package belongs to */ - pieceId: BucketAdLibActionId - /** The `externalId` of the Bucket adlib-action this package belongs to */ - pieceExternalId: string -} +export type ExpectedPackageIngestSourcePart = ExpectedPackageIngestSourcePiece | ExpectedPackageIngestSourceAdlibAction -export function getContentVersionHash(expectedPackage: ReadonlyDeep>): string { - return hashObj({ - content: expectedPackage.content, - version: expectedPackage.version, - // todo: should expectedPackage.sources.containerId be here as well? - }) -} +export type ExpectedPackageIngestSourceBucket = + | ExpectedPackageIngestSourceBucketAdlibPiece + | ExpectedPackageIngestSourceBucketAdlibAction +export type ExpectedPackageIngestSourceRundownBaseline = + | ExpectedPackageIngestSourceBaselinePiece + | ExpectedPackageIngestSourceBaselineAdlibPiece + | ExpectedPackageIngestSourceBaselineAdlibAction + | ExpectedPackageIngestSourceBaselineObjects + +export type ExpectedPackageIngestSource = + | ExpectedPackageIngestSourcePart + | ExpectedPackageIngestSourceRundownBaseline + | ExpectedPackageIngestSourceBucket + | ExpectedPackageIngestSourceStudioBaseline + +/** + * Generate the expectedPackageId for the given expectedPackage. + * This is a stable id derived from the package and its parent. This document is expected to be owned by multiple sources. + */ export function getExpectedPackageId( - /** _id of the owner (the piece, adlib etc..) 
*/
-	ownerId:
-		| PieceId
-		| PieceInstanceId
-		| AdLibActionId
-		| RundownBaselineAdLibActionId
-		| BucketAdLibId
-		| BucketAdLibActionId
-		| RundownId
-		| StudioId,
+	/** Preferably a RundownId or BucketId, but StudioId is allowed when not owned by a rundown or bucket */
+	parentId: RundownId | StudioId | BucketId,
	/** The locally unique id of the expectedPackage */
-	localExpectedPackageId: ExpectedPackage.Base['_id']
+	expectedPackage: ReadonlyDeep<ExpectedPackage.Base>
 ): ExpectedPackageId {
-	return protectString(`${ownerId}_${getHash(localExpectedPackageId)}`)
+	// This may be too aggressive, but we don't know how to merge some of the properties
+	const objHash = hashObj({
+		...expectedPackage,
+		_id: '', // Ignore the _id, this is not guaranteed to be stable
+		listenToPackageInfoUpdates: false, // Not relevant for the hash
+	} satisfies ReadonlyDeep<ExpectedPackage.Base>)
+
+	return protectString(`${parentId}_${getHash(objHash)}`)
+}
+
+/**
+ * Returns true if the expected package is referenced by any playout PieceInstances
+ * @returns boolean
+ */
+export function isPackageReferencedByPlayout(expectedPackage: Pick<ExpectedPackageDB, 'playoutSources'>): boolean {
+	return expectedPackage.playoutSources.pieceInstanceIds.length > 0
 }
diff --git a/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts
new file mode 100644
index 0000000000..7e74aadc4b
--- /dev/null
+++ b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts
@@ -0,0 +1,137 @@
+import type { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration'
+import type {
+	AdLibActionId,
+	BucketAdLibActionId,
+	BucketAdLibId,
+	BucketId,
+	ExpectedPackageId,
+	PartId,
+	PieceId,
+	RundownBaselineAdLibActionId,
+	RundownId,
+	SegmentId,
+	StudioId,
+} from '../Ids.js'
+
+/**
+ * Warning: This is a snapshot of the ExpectedPackage interface from before the rework in R53.
+ * This should not be modified and should only be used in code performing fixup operations.
+ */
+
+/*
+	Expected Packages are created from Pieces in the rundown.
+	A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc..
+	The blueprints generate Pieces with expectedPackages on them.
+	These are then picked up by a Package Manager who then tries to fulfill the expectations.
+	Example: An ExpectedPackage could be a "Media file to be present on the location used by a playout device".
+	The Package Manager will then copy the file to the right place.
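
A note on the reworked `getExpectedPackageId` above: because the hash blanks out the blueprint-assigned `_id` and `listenToPackageInfoUpdates`, two owners under the same parent that declare identical package content resolve to the same `ExpectedPackageId`, which is what lets one `ExpectedPackageDB` document accumulate several `ingestSources` (and be referenced from `PieceInstance.neededExpectedPackageIds` on the playout side). A minimal sketch of that property; the two `declare`d packages are hypothetical stand-ins that differ only in their blueprint `_id`:

```ts
import type { ReadonlyDeep } from 'type-fest'
import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

declare const rundownId: RundownId
declare const fromPiece: ReadonlyDeep<ExpectedPackage.Base> // _id: 'pkgA'
declare const fromAdLib: ReadonlyDeep<ExpectedPackage.Base> // _id: 'pkgB', content otherwise identical

// Same parent + same content (minus _id) => same stable id, so both ingest
// sources end up merged onto a single ExpectedPackageDB document:
getExpectedPackageId(rundownId, fromPiece) === getExpectedPackageId(rundownId, fromAdLib) // true
```
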
+*/ + +export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction + +export type ExpectedPackageFromRundownBaseline = + | ExpectedPackageDBFromBaselineAdLibAction + | ExpectedPackageDBFromBaselineAdLibPiece + | ExpectedPackageDBFromRundownBaselineObjects + +export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction + +export type ExpectedPackageDB = + | ExpectedPackageFromRundown + | ExpectedPackageDBFromBucket + | ExpectedPackageFromRundownBaseline + | ExpectedPackageDBFromStudioBaselineObjects + +export enum ExpectedPackageDBType { + PIECE = 'piece', + ADLIB_PIECE = 'adlib_piece', + ADLIB_ACTION = 'adlib_action', + BASELINE_ADLIB_PIECE = 'baseline_adlib_piece', + BASELINE_ADLIB_ACTION = 'baseline_adlib_action', + BUCKET_ADLIB = 'bucket_adlib', + BUCKET_ADLIB_ACTION = 'bucket_adlib_action', + RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', + STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects', +} +export interface ExpectedPackageDBBase extends Omit { + _id: ExpectedPackageId + /** The local package id - as given by the blueprints */ + blueprintPackageId: string + + /** The studio of the Rundown of the Piece this package belongs to */ + studioId: StudioId + + /** Hash that changes whenever the content or version changes. See getContentVersionHash() */ + contentVersionHash: string + + // pieceId: ProtectedString | null + fromPieceType: ExpectedPackageDBType + + created: Time +} +export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.ADLIB_ACTION + /** The Adlib Action this package belongs to */ + pieceId: AdLibActionId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} +export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION + /** The Piece this package belongs to */ + pieceId: RundownBaselineAdLibActionId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId + pieceId: null +} +export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS + pieceId: null +} + +export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB + 
bucketId: BucketId + /** The Bucket adlib this package belongs to */ + pieceId: BucketAdLibId + /** The `externalId` of the Bucket adlib this package belongs to */ + pieceExternalId: string +} +export interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION + bucketId: BucketId + /** The Bucket adlib-action this package belongs to */ + pieceId: BucketAdLibActionId + /** The `externalId` of the Bucket adlib-action this package belongs to */ + pieceExternalId: string +} diff --git a/packages/corelib/src/dataModel/PackageInfos.ts b/packages/corelib/src/dataModel/PackageInfos.ts index 879875be8a..4305aacd26 100644 --- a/packages/corelib/src/dataModel/PackageInfos.ts +++ b/packages/corelib/src/dataModel/PackageInfos.ts @@ -1,6 +1,5 @@ import { PackageInfo, Time } from '@sofie-automation/blueprints-integration' import { protectString } from '../protectedString.js' -import { ExpectedPackageDB } from './ExpectedPackages.js' import { ExpectedPackageId, PackageInfoId, PeripheralDeviceId, StudioId } from './Ids.js' /** @@ -14,7 +13,7 @@ export interface PackageInfoDB extends PackageInfo.Base { /** Reference to the Package this document has info about */ packageId: ExpectedPackageId /** Reference to the contentVersionHash of the ExpectedPackage, used to reference the expected content+version of the Package */ - expectedContentVersionHash: ExpectedPackageDB['contentVersionHash'] + expectedContentVersionHash: string /** Referring to the actual contentVersionHash of the Package, used to reference the exact content+version of the Package */ actualContentVersionHash: string diff --git a/packages/corelib/src/dataModel/PieceInstance.ts b/packages/corelib/src/dataModel/PieceInstance.ts index 7c69166887..1847a55969 100644 --- a/packages/corelib/src/dataModel/PieceInstance.ts +++ b/packages/corelib/src/dataModel/PieceInstance.ts @@ -7,6 +7,7 @@ import { RundownId, PartInstanceId, PieceId, + ExpectedPackageId, } from './Ids.js' import { Piece } from './Piece.js' import { omit } from '../lib.js' @@ -74,6 +75,13 @@ export interface PieceInstance { reportedStoppedPlayback?: Time plannedStartedPlayback?: Time plannedStoppedPlayback?: Time + + /** + * The IDs of ExpectedPackages that are needed for this PieceInstance + * This matches the data on `this.piece.expectedPackages`, resolved to the full database IDs + * Future: This should replace the expectedPackages on Piece entirely + */ + neededExpectedPackageIds?: ExpectedPackageId[] } export interface ResolvedPieceInstance { diff --git a/packages/corelib/src/dataModel/RundownPlaylist.ts b/packages/corelib/src/dataModel/RundownPlaylist.ts index e2850bc49b..0ea7a83fe8 100644 --- a/packages/corelib/src/dataModel/RundownPlaylist.ts +++ b/packages/corelib/src/dataModel/RundownPlaylist.ts @@ -94,6 +94,81 @@ export interface QuickLoopProps { forceAutoNext: ForceQuickLoopAutoNext } +export type RundownTTimerMode = RundownTTimerModeFreeRun | RundownTTimerModeCountdown | RundownTTimerModeTimeOfDay + +export interface RundownTTimerModeFreeRun { + readonly type: 'freeRun' + /** + * Starting time (unix timestamp) + * This may not be the original start time, if the timer has been paused/resumed + */ + startTime: number + /** + * Set to a timestamp to pause the timer at that timestamp + * When unpausing, the `startTime` should be adjusted to account for the paused duration + */ + pauseTime: number | null + /** The direction to count */ + // direction: 'up' | 'down' // TODO: does this make sense? 
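
The pause/resume bookkeeping described in the comments above is easy to get wrong, so it is worth spelling out. A sketch of the documented rule under a hypothetical helper name (the PR's real implementation is `resumeTTimer` in `playout/tTimers.ts`, which later parts of this diff only import):

```ts
import type { RundownTTimerModeFreeRun } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'

// Hypothetical illustration only: resume a paused freeRun timer by shifting
// startTime forward by the paused duration, so the elapsed time
// (now - startTime) is exactly what it was at the moment of pausing.
function resumeFreeRunSketch(mode: RundownTTimerModeFreeRun, now: number): RundownTTimerModeFreeRun {
	if (mode.pauseTime === null) return mode // not paused, nothing to do
	return {
		...mode,
		startTime: mode.startTime + (now - mode.pauseTime),
		pauseTime: null,
	}
}
```
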
+} +export interface RundownTTimerModeCountdown { + readonly type: 'countdown' + /** + * Starting time (unix timestamp) + * This may not be the original start time, if the timer has been paused/resumed + */ + startTime: number + /** + * Set to a timestamp to pause the timer at that timestamp + * When unpausing, the `targetTime` should be adjusted to account for the paused duration + */ + pauseTime: number | null + /** + * The duration of the countdown in milliseconds + */ + readonly duration: number + + /** + * If the countdown should stop at zero, or continue into negative values + */ + readonly stopAtZero: boolean +} +export interface RundownTTimerModeTimeOfDay { + readonly type: 'timeOfDay' + + /** The target timestamp of the timer, in milliseconds */ + targetTime: number + + /** + * The raw target string of the timer, as provided when setting the timer + * (e.g. "14:30", "2023-12-31T23:59:59Z", or a timestamp number) + */ + readonly targetRaw: string | number + + /** + * If the countdown should stop at zero, or continue into negative values + */ + readonly stopAtZero: boolean +} + +export type RundownTTimerIndex = 1 | 2 | 3 + +export interface RundownTTimer { + readonly index: RundownTTimerIndex + + /** A label for the timer */ + label: string + + /** The current mode of the timer, or null if not configured */ + mode: RundownTTimerMode | null + + /* + * Future ideas: + * allowUiControl: boolean + * display: { ... } // some kind of options for how to display in the ui + */ +} + export interface DBRundownPlaylist { _id: RundownPlaylistId /** External ID (source) of the playlist */ @@ -176,6 +251,12 @@ export interface DBRundownPlaylist { trackedAbSessions?: ABSessionInfo[] /** AB playback sessions assigned in the last timeline generation */ assignedAbSessions?: Record + + /** + * T-timers for the Playlist. + * This is a fixed size pool with 3 being chosen as a likely good amount, that can be used for any purpose. 
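
To make the countdown semantics above concrete: `startTime`/`pauseTime` track elapsed time the same way as in the freeRun mode, and the remaining time falls out of `duration`. The PR itself only derives the timer's `currentTime` (in `TTimersService.state`, via `calculateTTimerCurrentTime`); the remaining-time arithmetic below is a sketch under that reading:

```ts
import type { RundownTTimerModeCountdown } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'

// Hypothetical illustration only: remaining milliseconds of a countdown timer.
function countdownRemainingSketch(mode: RundownTTimerModeCountdown, now: number): number {
	const elapsed = (mode.pauseTime ?? now) - mode.startTime // frozen while paused
	const remaining = mode.duration - elapsed
	return mode.stopAtZero ? Math.max(0, remaining) : remaining
}
```
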
+ */ + tTimers: [RundownTTimer, RundownTTimer, RundownTTimer] } // Information about a 'selected' PartInstance for the Playlist diff --git a/packages/corelib/src/hash.ts b/packages/corelib/src/hash.ts index 3b38556f37..beb6ab2089 100644 --- a/packages/corelib/src/hash.ts +++ b/packages/corelib/src/hash.ts @@ -8,7 +8,15 @@ export function getHash(str: string): string { /** Creates a hash based on the object properties (excluding ordering of properties) */ // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types export function hashObj(obj: any): string { - if (typeof obj === 'object') { + if (obj === null) return 'null' + if (obj === undefined) return 'undefined' + + if (Array.isArray(obj)) { + // For arrays, we care about the order, and should preserve undefined + const strs = obj.map((val, i) => `${i}:${hashObj(val)}`) + + return getHash(strs.join('|')) + } else if (typeof obj === 'object') { const keys = Object.keys(obj).sort((a, b) => { if (a > b) return 1 if (a < b) return -1 @@ -17,7 +25,11 @@ export function hashObj(obj: any): string { const strs: string[] = [] for (const key of keys) { - strs.push(hashObj(obj[key])) + const val = obj[key] + // Skip undefined values to make {a: undefined} hash the same as {}, matching how JSON/mongo serialization will behave + if (val !== undefined) { + strs.push(`${key}:${hashObj(val)}`) + } } return getHash(strs.join('|')) } diff --git a/packages/corelib/src/mongo.ts b/packages/corelib/src/mongo.ts index 55025f013e..aebfa7fb97 100644 --- a/packages/corelib/src/mongo.ts +++ b/packages/corelib/src/mongo.ts @@ -119,7 +119,14 @@ export function mongoWhere(o: Record, selector: MongoQuery): const oAttr = o[key] if (_.isObject(s)) { - if (_.has(s, '$gt')) { + if (_.has(s, '$elemMatch')) { + // Handle $elemMatch for array fields + if (Array.isArray(oAttr)) { + ok = oAttr.some((item) => mongoWhere(item, s.$elemMatch)) + } else { + ok = false + } + } else if (_.has(s, '$gt')) { ok = oAttr > s.$gt } else if (_.has(s, '$gte')) { ok = oAttr >= s.$gte @@ -222,7 +229,7 @@ export function mongoFindOptions }>( const newDoc: any = {} // any since includeKeys breaks strict typings anyway for (const key of includeKeys) { - objectPath.set(newDoc, key, objectPath.get(doc, key)) + projectFieldIntoDoc(doc, newDoc, key) } return newDoc @@ -246,6 +253,69 @@ export function mongoFindOptions }>( return docs } +/** + * Project a field from a source document into a target document. + * Handles nested paths through arrays like MongoDB does. 
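
A quick illustration of the `hashObj` changes above: arrays now hash order-sensitively, object keys are folded into the hash next to their values, and `undefined` members are skipped so that in-memory objects hash like their JSON/mongo-serialized form. The expected behaviour, written as assertions (import path assumed from the package layout):

```ts
import { hashObj } from '@sofie-automation/corelib/dist/hash'

// Array element order matters:
hashObj(['a', 'b']) !== hashObj(['b', 'a']) // true
// Object key order does not:
hashObj({ a: 1, b: 2 }) === hashObj({ b: 2, a: 1 }) // true
// undefined values hash like a missing key, matching JSON/mongo serialization:
hashObj({ a: 1, b: undefined }) === hashObj({ a: 1 }) // true
// Keys are part of the hash, so equal values under different keys no longer collide:
hashObj({ a: 1 }) !== hashObj({ b: 1 }) // true
```
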
+ * e.g., 'items.name' on {items: [{name: 'a', value: 1}]} => {items: [{name: 'a'}]} + */ +function projectFieldIntoDoc(source: any, target: any, path: string): void { + const parts = path.split('.') + let currentSource = source + let currentTarget = target + + for (let i = 0; i < parts.length; i++) { + const part = parts[i] + const isLast = i === parts.length - 1 + const remainingPath = parts.slice(i + 1).join('.') + + if (currentSource === undefined || currentSource === null) { + return + } + + if (Array.isArray(currentSource)) { + // Handle array - project the field from each element + if (!Array.isArray(currentTarget)) { + // Initialize as empty array if not already an array + const parentPath = parts.slice(0, i).join('.') + if (parentPath) { + objectPath.set(target, parentPath, []) + currentTarget = objectPath.get(target, parentPath) + } else { + return // Can't set root to array + } + } + + // Project the remaining path into each array element + for (let j = 0; j < currentSource.length; j++) { + if (currentTarget[j] === undefined) { + currentTarget[j] = {} + } + const subPath = isLast ? part : [part, remainingPath].join('.') + projectFieldIntoDoc(currentSource[j], currentTarget[j], subPath) + } + return + } + + if (isLast) { + // We've reached the final part of the path + if (currentSource[part] !== undefined) { + currentTarget[part] = currentSource[part] + } + } else { + // Navigate deeper + if (currentTarget[part] === undefined) { + if (Array.isArray(currentSource[part])) { + currentTarget[part] = [] + } else { + currentTarget[part] = {} + } + } + currentSource = currentSource[part] + currentTarget = currentTarget[part] + } + } +} + export function mongoModify }>( selector: MongoQuery, doc: TDoc, @@ -411,17 +481,29 @@ export function pushOntoPath(obj: Record, path: string, valu * Push a value from a object, when the value matches * @param obj Object * @param path Path to array in object - * @param valueToPush Value to push onto array + * @param matchValue Value to match for removal. Supports $in operator for matching multiple values. 
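
Likewise for the `$elemMatch` branch added to `mongoWhere` above: it lets the in-memory query layer match a document by a single element of an array field, which queries against the new `ingestSources` array rely on. An assumed usage sketch over plain object literals:

```ts
import { mongoWhere } from '@sofie-automation/corelib/dist/mongo'

const doc = {
	ingestSources: [
		{ fromPieceType: 'piece', pieceId: 'piece1' },
		{ fromPieceType: 'adlib_action', pieceId: 'action1' },
	],
}

mongoWhere(doc, { ingestSources: { $elemMatch: { pieceId: 'piece1' } } }) // true: first element matches
mongoWhere(doc, { ingestSources: { $elemMatch: { pieceId: 'pieceX' } } }) // false: no element matches
```
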
*/
export function pullFromPath<T>(obj: Record<string, unknown>, path: string, matchValue: T): void {
	const mutator = (o: Record<string, unknown>, lastAttr: string) => {
		if (_.has(o, lastAttr)) {
-			if (!_.isArray(o[lastAttr]))
+			const arrAttr = o[lastAttr]
+			if (!arrAttr || !Array.isArray(arrAttr))
				throw new Error(
-					'Object propery "' + lastAttr + '" is not an array ("' + o[lastAttr] + '") (in path "' + path + '")'
+					'Object property "' + lastAttr + '" is not an array ("' + arrAttr + '") (in path "' + path + '")'
				)
-			return (o[lastAttr] = _.filter(o[lastAttr] as any, (entry: T) => !_.isMatch(entry, matchValue)))
+			// Handle $in operator for matching multiple values
+			if (
+				matchValue &&
+				typeof matchValue === 'object' &&
+				'$in' in matchValue &&
+				Array.isArray((matchValue as Record<string, unknown>).$in)
+			) {
+				const inValues = (matchValue as Record<string, unknown>).$in as unknown[]
+				return (o[lastAttr] = arrAttr.filter((entry: T) => !inValues.includes(entry)))
+			}
+
+			return (o[lastAttr] = arrAttr.filter((entry: T) => !_.isMatch(entry, matchValue)))
		} else {
			return undefined
		}
	}
diff --git a/packages/corelib/src/pubsub.ts b/packages/corelib/src/pubsub.ts
index 6a6e7783c4..e14e47894a 100644
--- a/packages/corelib/src/pubsub.ts
+++ b/packages/corelib/src/pubsub.ts
@@ -18,7 +18,7 @@ import { Blueprint } from './dataModel/Blueprint.js'
import { BucketAdLibAction } from './dataModel/BucketAdLibAction.js'
import { BucketAdLib } from './dataModel/BucketAdLibPiece.js'
import { ExpectedPackageWorkStatus } from './dataModel/ExpectedPackageWorkStatuses.js'
-import { ExpectedPackageDBBase } from './dataModel/ExpectedPackages.js'
+import { ExpectedPackageDB } from './dataModel/ExpectedPackages.js'
import { ExternalMessageQueueObj } from './dataModel/ExternalMessageQueue.js'
import { PackageContainerStatusDB } from './dataModel/PackageContainerStatus.js'
import { PeripheralDevice } from './dataModel/PeripheralDevice.js'
@@ -356,7 +356,7 @@ export type CorelibPubSubCollections = {
	[CollectionName.Buckets]: Bucket
	[CollectionName.BucketAdLibActions]: BucketAdLibAction
	[CollectionName.BucketAdLibPieces]: BucketAdLib
-	[CollectionName.ExpectedPackages]: ExpectedPackageDBBase
+	[CollectionName.ExpectedPackages]: ExpectedPackageDB
	[CollectionName.ExpectedPackageWorkStatuses]: ExpectedPackageWorkStatus
	[CollectionName.ExternalMessageQueue]: ExternalMessageQueueObj
	[CollectionName.Notifications]: DBNotificationObj
diff --git a/packages/corelib/src/snapshots.ts b/packages/corelib/src/snapshots.ts
index 9791c5c719..031addbb6b 100644
--- a/packages/corelib/src/snapshots.ts
+++ b/packages/corelib/src/snapshots.ts
@@ -35,6 +35,6 @@ export interface CoreRundownPlaylistSnapshot {
	adLibActions: Array
	baselineAdLibActions: Array
	expectedPlayoutItems: Array
-	expectedPackages: Array
+	expectedPackages: Array // Note: when reading, this could be in the old format
	timeline?: TimelineComplete
}
diff --git a/packages/corelib/src/worker/ingest.ts b/packages/corelib/src/worker/ingest.ts
index 3e27a13bc1..ad2c081939 100644
--- a/packages/corelib/src/worker/ingest.ts
+++ b/packages/corelib/src/worker/ingest.ts
@@ -104,10 +104,6 @@ export enum IngestJobs {
	 */
	MosSwapStory = 'mosSwapStory',

-	/**
-	 * Debug: Regenerate ExpectedPackages for a Rundown
-	 */
-	ExpectedPackagesRegenerate = 'expectedPackagesRegenerate',
	/**
	 * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos
	 */
@@ -229,9 +225,6 @@ export interface MosSwapStoryProps extends IngestPropsBase {
	story1: MOS.IMOSString128
}

-export interface ExpectedPackagesRegenerateProps {
-	rundownId: RundownId
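
Circling back to the `pullFromPath` change at the top of this hunk: the new `$in` handling mirrors mongo's `$pull: { field: { $in: [...] } }`, presumably so that many `pieceInstanceIds` can be stripped from `playoutSources` in a single modify. A behavioural sketch:

```ts
import { pullFromPath } from '@sofie-automation/corelib/dist/mongo'

const doc: Record<string, unknown> = {
	playoutSources: { pieceInstanceIds: ['a', 'b', 'c'] },
}

// Every entry matched by $in is removed in one pass:
pullFromPath(doc, 'playoutSources.pieceInstanceIds', { $in: ['a', 'c'] })
// doc is now { playoutSources: { pieceInstanceIds: ['b'] } }
```
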
-} export interface PackageInfosUpdatedRundownProps extends IngestPropsBase { packageIds: ExpectedPackageId[] } @@ -312,7 +305,6 @@ export type IngestJobFunc = { [IngestJobs.MosMoveStory]: (data: MosMoveStoryProps) => void [IngestJobs.MosSwapStory]: (data: MosSwapStoryProps) => void - [IngestJobs.ExpectedPackagesRegenerate]: (data: ExpectedPackagesRegenerateProps) => void [IngestJobs.PackageInfosUpdatedRundown]: (data: PackageInfosUpdatedRundownProps) => void [IngestJobs.UserRemoveRundown]: (data: UserRemoveRundownProps) => void diff --git a/packages/corelib/src/worker/studio.ts b/packages/corelib/src/worker/studio.ts index e5df4d1311..6eb045fc5e 100644 --- a/packages/corelib/src/worker/studio.ts +++ b/packages/corelib/src/worker/studio.ts @@ -205,6 +205,12 @@ export enum StudioJobs { * for use in ad.lib actions and other triggers */ SwitchRouteSet = 'switchRouteSet', + + /** + * Cleanup any expected packages playout references that are orphaned + * During playout it is hard to track removal of PieceInstances (particularly when resetting PieceInstances) + */ + CleanupOrphanedExpectedPackageReferences = 'cleanupOrphanedExpectedPackageReferences', } export interface RundownPlayoutPropsBase { @@ -369,6 +375,11 @@ export interface SwitchRouteSetProps { state: boolean | 'toggle' } +export interface CleanupOrphanedExpectedPackageReferencesProps { + playlistId: RundownPlaylistId + rundownId: RundownId +} + /** * Set of valid functions, of form: * `id: (data) => return` @@ -425,6 +436,8 @@ export type StudioJobFunc = { [StudioJobs.ClearQuickLoopMarkers]: (data: ClearQuickLoopMarkersProps) => void [StudioJobs.SwitchRouteSet]: (data: SwitchRouteSetProps) => void + + [StudioJobs.CleanupOrphanedExpectedPackageReferences]: (data: CleanupOrphanedExpectedPackageReferencesProps) => void } export function getStudioQueueName(id: StudioId): string { diff --git a/packages/job-worker/package.json b/packages/job-worker/package.json index 21ed27ecfc..356f8d57a8 100644 --- a/packages/job-worker/package.json +++ b/packages/job-worker/package.json @@ -45,6 +45,7 @@ "@sofie-automation/corelib": "1.53.0-in-development", "@sofie-automation/shared-lib": "1.53.0-in-development", "amqplib": "^0.10.5", + "chrono-node": "^2.9.0", "deepmerge": "^4.3.1", "elastic-apm-node": "^4.11.0", "mongodb": "^6.12.0", diff --git a/packages/job-worker/src/__mocks__/collection.ts b/packages/job-worker/src/__mocks__/collection.ts index 0c63527d77..065f9ee098 100644 --- a/packages/job-worker/src/__mocks__/collection.ts +++ b/packages/job-worker/src/__mocks__/collection.ts @@ -190,6 +190,16 @@ export class MockMongoCollection }> imp return docs.length } + private async removeOne(selector: MongoQuery | TDoc['_id']): Promise { + this.#ops.push({ type: 'removeOne', args: [selector] }) + + const docs: Pick[] = await this.findFetchInner(selector, { projection: { _id: 1 }, limit: 1 }) + for (const doc of docs) { + this.#documents.delete(doc._id) + } + + return docs.length + } async update(selector: MongoQuery | TDoc['_id'], modifier: MongoModifier): Promise { return this.updateInner(selector, modifier, false) } @@ -231,8 +241,12 @@ export class MockMongoCollection }> imp await this.updateInner(op.updateOne.filter, op.updateOne.update, true) } else if ('replaceOne' in op) { await this.replace(op.replaceOne.replacement as any) + } else if ('insertOne' in op) { + await this.insertOne(op.insertOne.document as any) } else if ('deleteMany' in op) { await this.remove(op.deleteMany.filter) + } else if ('deleteOne' in op) { + await 
this.removeOne(op.deleteOne.filter) } else { // Note: implement more as we start using them throw new Error(`Unknown mongo Bulk Operation: ${JSON.stringify(op)}`) diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index d11c3c5431..85ba9eaa9e 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -29,7 +29,7 @@ import { clone } from '@sofie-automation/corelib/dist/lib' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { EventsJobFunc } from '@sofie-automation/corelib/dist/worker/events' import { IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest' -import { StudioJobFunc } from '@sofie-automation/corelib/dist/worker/studio' +import { StudioJobFunc, StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' import { ReadonlyDeep } from 'type-fest' import { WrappedShowStyleBlueprint, WrappedStudioBlueprint } from '../blueprints/cache.js' import { @@ -46,6 +46,7 @@ import { ProcessedShowStyleBase, ProcessedShowStyleCompound, ProcessedShowStyleVariant, + QueueJobOptions, } from '../jobs/index.js' import { PlaylistLock, RundownLock } from '../jobs/lock.js' import { BaseModel } from '../modelBase.js' @@ -153,9 +154,14 @@ export class MockJobContext implements JobContext { throw new Error('Method not implemented.') } async queueStudioJob( - _name: T, - _data: Parameters[0] + name: T, + _data: Parameters[0], + _options?: QueueJobOptions ): Promise { + // Silently ignore the cleanup job - it's a background task that doesn't need to run in tests + if (name === StudioJobs.CleanupOrphanedExpectedPackageReferences) { + return + } throw new Error('Method not implemented.') } async queueEventJob( diff --git a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts index 88869a4da8..8d705cc1b7 100644 --- a/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts +++ b/packages/job-worker/src/__mocks__/defaultCollectionObjects.ts @@ -44,6 +44,12 @@ export function defaultRundownPlaylist(_id: RundownPlaylistId, studioId: StudioI type: PlaylistTimingType.None, }, rundownIdsInOrder: [], + + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], } } export function defaultRundown( diff --git a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts index a476c1c593..aad318fdb6 100644 --- a/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts +++ b/packages/job-worker/src/blueprints/context/OnSetAsNextContext.ts @@ -9,7 +9,7 @@ import { IBlueprintPieceDB, IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, - IBlueprintSegment, + IBlueprintSegmentDB, IEventContext, IOnSetAsNextContext, } from '@sofie-automation/blueprints-integration' @@ -28,11 +28,16 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { selectNewPartWithOffsets } from '../../playout/moveNextPart.js' import { getOrderedPartsAfterPlayhead } from '../../playout/lookahead/util.js' import { convertPartToBlueprints } from './lib.js' +import { TTimersService } from './services/TTimersService.js' +import type { IPlaylistTTimer } from '@sofie-automation/blueprints-integration/dist/context/tTimersContext' +import type { RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' export class 
OnSetAsNextContext extends ShowStyleUserContext implements IOnSetAsNextContext, IEventContext, IPartAndPieceInstanceActionContext { + readonly #tTimersService: TTimersService + public pendingMoveNextPart: { selectedPart: ReadonlyDeep | null } | undefined = undefined constructor( @@ -45,6 +50,7 @@ export class OnSetAsNextContext public readonly manuallySelected: boolean ) { super(contextInfo, context, showStyle, watchedPackages) + this.#tTimersService = new TTimersService(playoutModel) } public get quickLoopInfo(): BlueprintQuickLookInfo | null { @@ -75,7 +81,7 @@ export class OnSetAsNextContext return this.partAndPieceInstanceService.getResolvedPieceInstances(part) } - async getSegment(segment: 'current' | 'next'): Promise { + async getSegment(segment: 'current' | 'next'): Promise { return this.partAndPieceInstanceService.getSegment(segment) } @@ -159,4 +165,11 @@ export class OnSetAsNextContext getCurrentTime(): number { return getCurrentTime() } + + getTimer(index: RundownTTimerIndex): IPlaylistTTimer { + return this.#tTimersService.getTimer(index) + } + clearAllTimers(): void { + this.#tTimersService.clearAllTimers() + } } diff --git a/packages/job-worker/src/blueprints/context/OnTakeContext.ts b/packages/job-worker/src/blueprints/context/OnTakeContext.ts index 9d431d9958..0417e146a6 100644 --- a/packages/job-worker/src/blueprints/context/OnTakeContext.ts +++ b/packages/job-worker/src/blueprints/context/OnTakeContext.ts @@ -12,7 +12,7 @@ import { TSR, IBlueprintPlayoutDevice, IOnTakeContext, - IBlueprintSegment, + IBlueprintSegmentDB, } from '@sofie-automation/blueprints-integration' import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' @@ -27,8 +27,13 @@ import { ActionPartChange, PartAndPieceInstanceActionService } from './services/ import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' import { getOrderedPartsAfterPlayhead } from '../../playout/lookahead/util.js' import { convertPartToBlueprints } from './lib.js' +import type { IPlaylistTTimer } from '@sofie-automation/blueprints-integration/dist/context/tTimersContext' +import { TTimersService } from './services/TTimersService.js' +import type { RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContext, IEventContext { + readonly #tTimersService: TTimersService + public isTakeAborted: boolean public get quickLoopInfo(): BlueprintQuickLookInfo | null { @@ -52,6 +57,7 @@ export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContex ) { super(contextInfo, _context, showStyle, watchedPackages) this.isTakeAborted = false + this.#tTimersService = new TTimersService(_playoutModel) } async getUpcomingParts(limit: number = 5): Promise> { @@ -71,7 +77,7 @@ export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContex async getResolvedPieceInstances(part: 'current' | 'next'): Promise { return this.partAndPieceInstanceService.getResolvedPieceInstances(part) } - async getSegment(segment: 'current' | 'next'): Promise { + async getSegment(segment: 'current' | 'next'): Promise { return this.partAndPieceInstanceService.getSegment(segment) } @@ -162,4 +168,11 @@ export class OnTakeContext extends ShowStyleUserContext implements IOnTakeContex getCurrentTime(): number { return getCurrentTime() } + + getTimer(index: RundownTTimerIndex): IPlaylistTTimer { + return 
this.#tTimersService.getTimer(index) + } + clearAllTimers(): void { + this.#tTimersService.clearAllTimers() + } } diff --git a/packages/job-worker/src/blueprints/context/RundownActivationContext.ts b/packages/job-worker/src/blueprints/context/RundownActivationContext.ts index a1c6849245..a97d6c7dbc 100644 --- a/packages/job-worker/src/blueprints/context/RundownActivationContext.ts +++ b/packages/job-worker/src/blueprints/context/RundownActivationContext.ts @@ -13,10 +13,14 @@ import { PlayoutModel } from '../../playout/model/PlayoutModel.js' import { RundownEventContext } from './RundownEventContext.js' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { setTimelineDatastoreValue, removeTimelineDatastoreValue } from '../../playout/datastore.js' +import { TTimersService } from './services/TTimersService.js' +import type { IPlaylistTTimer } from '@sofie-automation/blueprints-integration/dist/context/tTimersContext' +import type { RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' export class RundownActivationContext extends RundownEventContext implements IRundownActivationContext { private readonly _playoutModel: PlayoutModel private readonly _context: JobContext + readonly #tTimersService: TTimersService private readonly _previousState: IRundownActivationContextState private readonly _currentState: IRundownActivationContextState @@ -43,6 +47,8 @@ export class RundownActivationContext extends RundownEventContext implements IRu this._playoutModel = options.playoutModel this._previousState = options.previousState this._currentState = options.currentState + + this.#tTimersService = new TTimersService(this._playoutModel) } get previousState(): IRundownActivationContextState { @@ -74,4 +80,11 @@ export class RundownActivationContext extends RundownEventContext implements IRu await removeTimelineDatastoreValue(this._context, key) }) } + + getTimer(index: RundownTTimerIndex): IPlaylistTTimer { + return this.#tTimersService.getTimer(index) + } + clearAllTimers(): void { + this.#tTimersService.clearAllTimers() + } } diff --git a/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts b/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts new file mode 100644 index 0000000000..a6543c76d9 --- /dev/null +++ b/packages/job-worker/src/blueprints/context/__tests__/watchedPackages.test.ts @@ -0,0 +1,655 @@ +import { setupDefaultJobEnvironment } from '../../../__mocks__/context.js' +import { WatchedPackagesHelper } from '../watchedPackages.js' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' +import { literal } from '@sofie-automation/corelib/dist/lib' +import { PackageInfo } from '@sofie-automation/blueprints-integration' +import { + ExpectedPackageId, + RundownId, + BucketId, + PeripheralDeviceId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' + +describe('WatchedPackagesHelper', () => { + const mockDeviceId = protectString('device1') + describe('empty', () => { + it('creates an empty helper', () => { + const context = setupDefaultJobEnvironment() + const helper = WatchedPackagesHelper.empty(context) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(false) + expect(helper.getPackageInfo('pkg1')).toEqual([]) + }) + }) + + describe('create', () => { 
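
A review-style note on the tests that follow: each case rebuilds the same new-format `ExpectedPackageDB` literal by hand. A small fixture factory, sketched below with hypothetical names, would keep the fixtures in sync with the type and cut down on the `as any` casts:

```ts
import { protectString } from '@sofie-automation/corelib/dist/protectedString'
import type {
	ExpectedPackageDB,
	ExpectedPackageIngestSource,
} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
import type { RundownId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'

// Hypothetical helper (not part of this PR), mirroring the literals used in these tests:
function makeRundownPackage(
	id: string,
	studioId: StudioId,
	rundownId: RundownId,
	ingestSources: ExpectedPackageIngestSource[]
): ExpectedPackageDB {
	return {
		_id: protectString(id),
		studioId,
		rundownId,
		bucketId: null,
		package: { _id: 'package1' } as any, // minimal stub, as in the tests below
		ingestSources,
		playoutSources: { pieceInstanceIds: [] },
		created: 1000,
	}
}
```
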
+ it('creates helper with no matching packages', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(false) + }) + + it('creates helper with packages from rundown', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + // Add expected package to the database + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Add package info + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + expect(helper.getPackageInfo('package1')).toHaveLength(1) + expect(helper.getPackageInfo('package1')[0].type).toBe(PackageInfo.Type.SCAN) + }) + + it('creates helper with packages from bucket', async () => { + const context = setupDefaultJobEnvironment() + const bucketId = protectString('bucket1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: null, + bucketId: bucketId, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, null, bucketId, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + }) + + it('filters packages by ingest source', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + // Package with matching source + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Package with non-matching source + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece2') } as any, 
+ ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('pkg1'))).toBe(true) + expect(helper.hasPackage(protectString('pkg2'))).toBe(false) + }) + + it('splits packages with multiple ingest sources', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece2') } as any, + ] as any, + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Should match both sources + const helper1 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + expect(helper1.hasPackage(packageId)).toBe(true) + + const helper2 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + }) + expect(helper2.hasPackage(packageId)).toBe(true) + }) + + it('does return package info for packages with listenToPackageInfoUpdates: false', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: false, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + // Package should still be found (create doesn't filter by listenToPackageInfoUpdates) + expect(helper.hasPackage(packageId)).toBe(true) + // And package info should be available + expect(helper.getPackageInfo('package1')).toHaveLength(1) + }) + + it('handles packages with mixed listenToPackageInfoUpdates in sources', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 
'package1', + listenToPackageInfoUpdates: true, + } as any, + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: false, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + // Helper with source that listens to updates should include the package + const helper1 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + expect(helper1.hasPackage(packageId)).toBe(true) + + // Helper with source that doesn't listen to updates should also include it + const helper2 = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece2'), + }) + expect(helper2.hasPackage(packageId)).toBe(true) + }) + }) + + describe('filter', () => { + it('filters packages based on predicate', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + // Add multiple packages + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package2', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + // Filter to only keep pkg1 + const filtered = helper.filter(context, (pkg) => pkg.packageId === protectString('pkg1')) + + expect(filtered.hasPackage(protectString('pkg1'))).toBe(true) + expect(filtered.hasPackage(protectString('pkg2'))).toBe(false) + }) + + it('filters package infos along with packages', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg1'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: protectString('pkg2'), + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package2' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package2', + listenToPackageInfoUpdates: true, + } as 
any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: protectString('pkg1'), + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info2'), + studioId: context.studioId, + packageId: protectString('pkg2'), + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'def456', + actualContentVersionHash: 'def456', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const filtered = helper.filter(context, (pkg) => pkg.packageId === protectString('pkg1')) + + // Should only have info for pkg1 + expect(filtered.getPackageInfo('package1')).toHaveLength(1) + expect(filtered.getPackageInfo('package2')).toHaveLength(0) + }) + }) + + describe('hasPackage', () => { + it('returns true for existing package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(packageId)).toBe(true) + }) + + it('returns false for non-existing package', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.hasPackage(protectString('nonexistent'))).toBe(false) + }) + }) + + describe('getPackageInfo', () => { + it('returns empty array for unknown package', async () => { + const context = setupDefaultJobEnvironment() + + const helper = await WatchedPackagesHelper.create(context, protectString('rundown1'), null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.getPackageInfo('unknown')).toEqual([]) + }) + + it('returns package info for known package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: 
protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const infos = helper.getPackageInfo('package1') + expect(infos).toHaveLength(1) + expect(infos[0].type).toBe(PackageInfo.Type.SCAN) + }) + + it('returns multiple package infos for a package', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + blueprintPackageId: 'package1', + listenToPackageInfoUpdates: true, + } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info1'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.SCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + await context.mockCollections.PackageInfos.insertOne( + literal({ + _id: protectString('info2'), + studioId: context.studioId, + packageId: packageId, + deviceId: mockDeviceId, + type: PackageInfo.Type.DEEPSCAN, + expectedContentVersionHash: 'abc123', + actualContentVersionHash: 'abc123', + payload: {} as any, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + const infos = helper.getPackageInfo('package1') + expect(infos).toHaveLength(2) + expect(infos.map((i) => i.type)).toContain(PackageInfo.Type.SCAN) + expect(infos.map((i) => i.type)).toContain(PackageInfo.Type.DEEPSCAN) + }) + + it('returns empty array for package with no info', async () => { + const context = setupDefaultJobEnvironment() + const rundownId = protectString('rundown1') + const packageId = protectString('pkg1') + + await context.mockCollections.ExpectedPackages.insertOne( + literal({ + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + package: { _id: 'package1' } as any, + ingestSources: [ + { fromPieceType: ExpectedPackageDBType.PIECE, pieceId: protectString('piece1') } as any, + ], + playoutSources: { + pieceInstanceIds: [], + }, + created: 1000, + }) + ) + + const helper = await WatchedPackagesHelper.create(context, rundownId, null, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('piece1'), + }) + + expect(helper.getPackageInfo('package1')).toEqual([]) + }) + }) +}) diff --git a/packages/job-worker/src/blueprints/context/adlibActions.ts b/packages/job-worker/src/blueprints/context/adlibActions.ts index 3eaaf728b6..26408eb99f 100644 --- a/packages/job-worker/src/blueprints/context/adlibActions.ts +++ b/packages/job-worker/src/blueprints/context/adlibActions.ts @@ -14,7 +14,7 @@ import { TSR, IBlueprintPlayoutDevice, StudioRouteSet, - IBlueprintSegment, + IBlueprintSegmentDB, } from 
'@sofie-automation/blueprints-integration' import { PartInstanceId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' @@ -34,6 +34,9 @@ import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration import { setNextPartFromPart } from '../../playout/setNext.js' import { getOrderedPartsAfterPlayhead } from '../../playout/lookahead/util.js' import { convertPartToBlueprints } from './lib.js' +import { IPlaylistTTimer } from '@sofie-automation/blueprints-integration/dist/context/tTimersContext' +import { TTimersService } from './services/TTimersService.js' +import type { RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' export class DatastoreActionExecutionContext extends ShowStyleUserContext @@ -66,6 +69,8 @@ export class DatastoreActionExecutionContext /** Actions */ export class ActionExecutionContext extends ShowStyleUserContext implements IActionExecutionContext, IEventContext { + readonly #tTimersService: TTimersService + /** * Whether the blueprints requested a take to be performed at the end of this action * */ @@ -102,6 +107,7 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct private readonly partAndPieceInstanceService: PartAndPieceInstanceActionService ) { super(contextInfo, _context, showStyle, watchedPackages) + this.#tTimersService = new TTimersService(_playoutModel) } async getUpcomingParts(limit: number = 5): Promise> { @@ -120,7 +126,7 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct return this.partAndPieceInstanceService.getResolvedPieceInstances(part) } - async getSegment(segment: 'current' | 'next'): Promise { + async getSegment(segment: 'current' | 'next'): Promise { return this.partAndPieceInstanceService.getSegment(segment) } @@ -257,4 +263,11 @@ export class ActionExecutionContext extends ShowStyleUserContext implements IAct getCurrentTime(): number { return getCurrentTime() } + + getTimer(index: RundownTTimerIndex): IPlaylistTTimer { + return this.#tTimersService.getTimer(index) + } + clearAllTimers(): void { + this.#tTimersService.clearAllTimers() + } } diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index f18e2e3a0c..bee62b09eb 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -9,7 +9,7 @@ import { IBlueprintPieceDB, IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, - IBlueprintSegment, + IBlueprintSegmentDB, OmitId, SomeContent, Time, @@ -140,7 +140,7 @@ export class PartAndPieceInstanceActionService { ) return resolvedInstances.map(convertResolvedPieceInstanceToBlueprints) } - getSegment(segment: 'current' | 'next'): IBlueprintSegment | undefined { + getSegment(segment: 'current' | 'next'): IBlueprintSegmentDB | undefined { const partInstance = this.#getPartInstance(segment) if (!partInstance) return undefined diff --git a/packages/job-worker/src/blueprints/context/services/TTimersService.ts b/packages/job-worker/src/blueprints/context/services/TTimersService.ts new file mode 100644 index 0000000000..b8ef3c7e21 --- /dev/null +++ b/packages/job-worker/src/blueprints/context/services/TTimersService.ts @@ -0,0 +1,168 @@ +import type { + IPlaylistTTimer, + IPlaylistTTimerState, +} from 
'@sofie-automation/blueprints-integration/dist/context/tTimersContext' +import type { RundownTTimer, RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { assertNever } from '@sofie-automation/corelib/dist/lib' +import type { PlayoutModel } from '../../../playout/model/PlayoutModel.js' +import { ReadonlyDeep } from 'type-fest' +import { + calculateTTimerCurrentTime, + createCountdownTTimer, + createFreeRunTTimer, + createTimeOfDayTTimer, + pauseTTimer, + restartTTimer, + resumeTTimer, + validateTTimerIndex, +} from '../../../playout/tTimers.js' +import { getCurrentTime } from '../../../lib/time.js' + +export class TTimersService { + readonly playoutModel: PlayoutModel + + readonly timers: [PlaylistTTimerImpl, PlaylistTTimerImpl, PlaylistTTimerImpl] + + constructor(playoutModel: PlayoutModel) { + this.playoutModel = playoutModel + + this.timers = [ + new PlaylistTTimerImpl(playoutModel, 1), + new PlaylistTTimerImpl(playoutModel, 2), + new PlaylistTTimerImpl(playoutModel, 3), + ] + } + + getTimer(index: RundownTTimerIndex): IPlaylistTTimer { + validateTTimerIndex(index) + return this.timers[index - 1] + } + clearAllTimers(): void { + for (const timer of this.timers) { + timer.clearTimer() + } + } +} + +export class PlaylistTTimerImpl implements IPlaylistTTimer { + readonly #playoutModel: PlayoutModel + readonly #index: RundownTTimerIndex + + get #modelTimer(): ReadonlyDeep { + return this.#playoutModel.playlist.tTimers[this.#index - 1] + } + + get index(): RundownTTimerIndex { + return this.#modelTimer.index + } + get label(): string { + return this.#modelTimer.label + } + get state(): IPlaylistTTimerState | null { + const rawMode = this.#modelTimer.mode + switch (rawMode?.type) { + case 'countdown': + return { + mode: 'countdown', + currentTime: calculateTTimerCurrentTime(rawMode.startTime, rawMode.pauseTime), + duration: rawMode.duration, + paused: !!rawMode.pauseTime, + stopAtZero: rawMode.stopAtZero, + } + case 'freeRun': + return { + mode: 'freeRun', + currentTime: calculateTTimerCurrentTime(rawMode.startTime, rawMode.pauseTime), + paused: !!rawMode.pauseTime, + } + case 'timeOfDay': + return { + mode: 'timeOfDay', + currentTime: rawMode.targetTime - getCurrentTime(), + targetTime: rawMode.targetTime, + targetRaw: rawMode.targetRaw, + stopAtZero: rawMode.stopAtZero, + } + case undefined: + return null + default: + assertNever(rawMode) + return null + } + } + + constructor(playoutModel: PlayoutModel, index: RundownTTimerIndex) { + this.#playoutModel = playoutModel + this.#index = index + + validateTTimerIndex(index) + } + + setLabel(label: string): void { + this.#playoutModel.updateTTimer({ + ...this.#modelTimer, + label: label, + }) + } + clearTimer(): void { + this.#playoutModel.updateTTimer({ + ...this.#modelTimer, + mode: null, + }) + } + startCountdown(duration: number, options?: { stopAtZero?: boolean; startPaused?: boolean }): void { + this.#playoutModel.updateTTimer({ + ...this.#modelTimer, + mode: createCountdownTTimer(duration, { + stopAtZero: options?.stopAtZero ?? true, + startPaused: options?.startPaused ?? false, + }), + }) + } + startTimeOfDay(targetTime: string | number, options?: { stopAtZero?: boolean }): void { + this.#playoutModel.updateTTimer({ + ...this.#modelTimer, + mode: createTimeOfDayTTimer(targetTime, { + stopAtZero: options?.stopAtZero ?? 
+				stopAtZero: options?.stopAtZero ?? true,
+			}),
+		})
+	}
+	startFreeRun(options?: { startPaused?: boolean }): void {
+		this.#playoutModel.updateTTimer({
+			...this.#modelTimer,
+			mode: createFreeRunTTimer({
+				startPaused: options?.startPaused ?? false,
+			}),
+		})
+	}
+	pause(): boolean {
+		const newTimer = pauseTTimer(this.#modelTimer.mode)
+		if (!newTimer) return false
+
+		this.#playoutModel.updateTTimer({
+			...this.#modelTimer,
+			mode: newTimer,
+		})
+		return true
+	}
+	resume(): boolean {
+		const newTimer = resumeTTimer(this.#modelTimer.mode)
+		if (!newTimer) return false
+
+		this.#playoutModel.updateTTimer({
+			...this.#modelTimer,
+			mode: newTimer,
+		})
+		return true
+	}
+	restart(): boolean {
+		const newTimer = restartTTimer(this.#modelTimer.mode)
+		if (!newTimer) return false
+
+		this.#playoutModel.updateTTimer({
+			...this.#modelTimer,
+			mode: newTimer,
+		})
+		return true
+	}
+}
diff --git a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts
index c616e73f51..e8cfda6bb4 100644
--- a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts
+++ b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts
@@ -1477,6 +1477,7 @@ describe('Test blueprint api context', () => {
 			expect(resultPiece).toEqual(convertPieceInstanceToBlueprints(pieceInstance1.pieceInstance))
 			const pieceInstance0After = {
 				...pieceInstance0Before,
+				neededExpectedPackageIds: [],
 				piece: {
 					...pieceInstance0Before.piece,
 					...omit(pieceInstance0Delta, 'badProperty', '_id'),
diff --git a/packages/job-worker/src/blueprints/context/services/__tests__/TTimersService.test.ts b/packages/job-worker/src/blueprints/context/services/__tests__/TTimersService.test.ts
new file mode 100644
index 0000000000..7943a89592
--- /dev/null
+++ b/packages/job-worker/src/blueprints/context/services/__tests__/TTimersService.test.ts
@@ -0,0 +1,734 @@
+/* eslint-disable @typescript-eslint/unbound-method */
+import { useFakeCurrentTime, useRealCurrentTime } from '../../../../__mocks__/time.js'
+import { TTimersService, PlaylistTTimerImpl } from '../TTimersService.js'
+import type { PlayoutModel } from '../../../../playout/model/PlayoutModel.js'
+import type { RundownTTimer, RundownTTimerIndex } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+import { mock, MockProxy } from 'jest-mock-extended'
+import type { ReadonlyDeep } from 'type-fest'
+
+function createMockPlayoutModel(tTimers: [RundownTTimer, RundownTTimer, RundownTTimer]): MockProxy<PlayoutModel> {
+	const mockPlayoutModel = mock<PlayoutModel>()
+	const mockPlaylist = {
+		tTimers,
+	} as unknown as ReadonlyDeep<DBRundownPlaylist>
+
+	Object.defineProperty(mockPlayoutModel, 'playlist', {
+		get: () => mockPlaylist,
+		configurable: true,
+	})
+
+	return mockPlayoutModel
+}
+
+function createEmptyTTimers(): [RundownTTimer, RundownTTimer, RundownTTimer] {
+	return [
+		{ index: 1, label: 'Timer 1', mode: null },
+		{ index: 2, label: 'Timer 2', mode: null },
+		{ index: 3, label: 'Timer 3', mode: null },
+	]
+}
+
+describe('TTimersService', () => {
+	beforeEach(() => {
+		useFakeCurrentTime(10000)
+	})
+
+	afterEach(() => {
+		useRealCurrentTime()
+	})
+
+	describe('constructor', () => {
+		it('should create three timer instances', () => {
+			const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers())
+
+			const 
service = new TTimersService(mockPlayoutModel) + + expect(service.timers).toHaveLength(3) + expect(service.timers[0]).toBeInstanceOf(PlaylistTTimerImpl) + expect(service.timers[1]).toBeInstanceOf(PlaylistTTimerImpl) + expect(service.timers[2]).toBeInstanceOf(PlaylistTTimerImpl) + }) + }) + + describe('getTimer', () => { + it('should return the correct timer for index 1', () => { + const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers()) + const service = new TTimersService(mockPlayoutModel) + + const timer = service.getTimer(1) + + expect(timer).toBe(service.timers[0]) + }) + + it('should return the correct timer for index 2', () => { + const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers()) + const service = new TTimersService(mockPlayoutModel) + + const timer = service.getTimer(2) + + expect(timer).toBe(service.timers[1]) + }) + + it('should return the correct timer for index 3', () => { + const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers()) + const service = new TTimersService(mockPlayoutModel) + + const timer = service.getTimer(3) + + expect(timer).toBe(service.timers[2]) + }) + + it('should throw for invalid index', () => { + const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers()) + const service = new TTimersService(mockPlayoutModel) + + expect(() => service.getTimer(0 as RundownTTimerIndex)).toThrow('T-timer index out of range: 0') + expect(() => service.getTimer(4 as RundownTTimerIndex)).toThrow('T-timer index out of range: 4') + }) + }) + + describe('clearAllTimers', () => { + it('should call clearTimer on all timers', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: null } + tTimers[1].mode = { type: 'countdown', startTime: 5000, pauseTime: null, duration: 60000, stopAtZero: true } + + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const service = new TTimersService(mockPlayoutModel) + + service.clearAllTimers() + + // updateTTimer should have been called 3 times (once for each timer) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledTimes(3) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith( + expect.objectContaining({ index: 1, mode: null }) + ) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith( + expect.objectContaining({ index: 2, mode: null }) + ) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith( + expect.objectContaining({ index: 3, mode: null }) + ) + }) + }) +}) + +describe('PlaylistTTimerImpl', () => { + beforeEach(() => { + useFakeCurrentTime(10000) + }) + + afterEach(() => { + useRealCurrentTime() + }) + + describe('getters', () => { + it('should return the correct index', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 2) + + expect(timer.index).toBe(2) + }) + + it('should return the correct label', () => { + const tTimers = createEmptyTTimers() + tTimers[1].label = 'Custom Label' + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 2) + + expect(timer.label).toBe('Custom Label') + }) + + it('should return null state when no mode is set', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toBeNull() + }) + + it('should return running freeRun state', () => { + const tTimers = 
createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: null } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'freeRun', + currentTime: 5000, // 10000 - 5000 + paused: false, // pauseTime is null = running + }) + }) + + it('should return paused freeRun state', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: 8000 } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'freeRun', + currentTime: 3000, // 8000 - 5000 + paused: true, // pauseTime is set = paused + }) + }) + + it('should return running countdown state', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'countdown', + startTime: 5000, + pauseTime: null, + duration: 60000, + stopAtZero: true, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'countdown', + currentTime: 5000, // 10000 - 5000 + duration: 60000, + paused: false, // pauseTime is null = running + stopAtZero: true, + }) + }) + + it('should return paused countdown state', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'countdown', + startTime: 5000, + pauseTime: 7000, + duration: 60000, + stopAtZero: false, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'countdown', + currentTime: 2000, // 7000 - 5000 + duration: 60000, + paused: true, // pauseTime is set = paused + stopAtZero: false, + }) + }) + + it('should return timeOfDay state', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'timeOfDay', + targetTime: 20000, // 10 seconds in the future + targetRaw: '15:30', + stopAtZero: true, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'timeOfDay', + currentTime: 10000, // targetTime - getCurrentTime() = 20000 - 10000 + targetTime: 20000, + targetRaw: '15:30', + stopAtZero: true, + }) + }) + + it('should return timeOfDay state with numeric targetRaw', () => { + const tTimers = createEmptyTTimers() + const targetTimestamp = 1737331200000 + tTimers[0].mode = { + type: 'timeOfDay', + targetTime: targetTimestamp, + targetRaw: targetTimestamp, + stopAtZero: false, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(timer.state).toEqual({ + mode: 'timeOfDay', + currentTime: targetTimestamp - 10000, // targetTime - getCurrentTime() + targetTime: targetTimestamp, + targetRaw: targetTimestamp, + stopAtZero: false, + }) + }) + }) + + describe('setLabel', () => { + it('should update the label', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.setLabel('New Label') + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'New Label', + mode: null, + }) + }) + }) + + describe('clearTimer', () => { + it('should clear the timer mode', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 
'freeRun', startTime: 5000, pauseTime: null } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.clearTimer() + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: null, + }) + }) + }) + + describe('startCountdown', () => { + it('should start a running countdown with default options', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startCountdown(60000) + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'countdown', + startTime: 10000, + pauseTime: null, + duration: 60000, + stopAtZero: true, + }, + }) + }) + + it('should start a paused countdown', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startCountdown(30000, { startPaused: true, stopAtZero: false }) + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'countdown', + startTime: 10000, + pauseTime: 10000, + duration: 30000, + stopAtZero: false, + }, + }) + }) + }) + + describe('startFreeRun', () => { + it('should start a running free-run timer', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startFreeRun() + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'freeRun', + startTime: 10000, + pauseTime: null, + }, + }) + }) + + it('should start a paused free-run timer', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startFreeRun({ startPaused: true }) + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'freeRun', + startTime: 10000, + pauseTime: 10000, + }, + }) + }) + }) + + describe('startTimeOfDay', () => { + it('should start a timeOfDay timer with time string', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startTimeOfDay('15:30') + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'timeOfDay', + targetTime: expect.any(Number), // new target time + targetRaw: '15:30', + stopAtZero: true, + }, + }) + }) + + it('should start a timeOfDay timer with numeric timestamp', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + const targetTimestamp = 1737331200000 + + timer.startTimeOfDay(targetTimestamp) + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'timeOfDay', + targetTime: targetTimestamp, + targetRaw: targetTimestamp, + stopAtZero: true, + }, + }) + }) + + it('should start a timeOfDay timer with stopAtZero false', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + 
timer.startTimeOfDay('18:00', { stopAtZero: false }) + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: expect.objectContaining({ + type: 'timeOfDay', + targetRaw: '18:00', + stopAtZero: false, + }), + }) + }) + + it('should start a timeOfDay timer with 12-hour format', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + timer.startTimeOfDay('5:30pm') + + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: expect.objectContaining({ + type: 'timeOfDay', + targetTime: expect.any(Number), // new target time + targetRaw: '5:30pm', + stopAtZero: true, + }), + }) + }) + + it('should throw for invalid time string', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(() => timer.startTimeOfDay('invalid')).toThrow('Unable to parse target time for timeOfDay T-timer') + }) + + it('should throw for empty time string', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + expect(() => timer.startTimeOfDay('')).toThrow('Unable to parse target time for timeOfDay T-timer') + }) + }) + + describe('pause', () => { + it('should pause a running freeRun timer', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: null } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.pause() + + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'freeRun', + startTime: 5000, + pauseTime: 10000, + }, + }) + }) + + it('should pause a running countdown timer', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'countdown', startTime: 5000, pauseTime: null, duration: 60000, stopAtZero: true } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.pause() + + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'countdown', + startTime: 5000, + pauseTime: 10000, + duration: 60000, + stopAtZero: true, + }, + }) + }) + + it('should return false for timer with no mode', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.pause() + + expect(result).toBe(false) + expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled() + }) + + it('should return false for timeOfDay timer (does not support pause)', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'timeOfDay', + targetTime: 20000, + targetRaw: '15:30', + stopAtZero: true, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.pause() + + expect(result).toBe(false) + expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled() + }) + }) + + describe('resume', () => { + it('should resume a paused freeRun timer', () => { + const tTimers = 
createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: 8000 } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.resume() + + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'freeRun', + startTime: 7000, // adjusted for pause duration + pauseTime: null, + }, + }) + }) + + it('should return true but not change a running timer', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: null } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.resume() + + // Returns true because timer supports resume, but it's already running + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalled() + }) + + it('should return false for timer with no mode', () => { + const tTimers = createEmptyTTimers() + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.resume() + + expect(result).toBe(false) + expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled() + }) + + it('should return false for timeOfDay timer (does not support resume)', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'timeOfDay', + targetTime: 20000, + targetRaw: '15:30', + stopAtZero: true, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.resume() + + expect(result).toBe(false) + expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled() + }) + }) + + describe('restart', () => { + it('should restart a countdown timer', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'countdown', startTime: 5000, pauseTime: null, duration: 60000, stopAtZero: true } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.restart() + + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'countdown', + startTime: 10000, // reset to now + pauseTime: null, + duration: 60000, + stopAtZero: true, + }, + }) + }) + + it('should restart a paused countdown timer (stays paused)', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { + type: 'countdown', + startTime: 5000, + pauseTime: 8000, + duration: 60000, + stopAtZero: false, + } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.restart() + + expect(result).toBe(true) + expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({ + index: 1, + label: 'Timer 1', + mode: { + type: 'countdown', + startTime: 10000, + pauseTime: 10000, // also reset to now (paused at start) + duration: 60000, + stopAtZero: false, + }, + }) + }) + + it('should return false for freeRun timer', () => { + const tTimers = createEmptyTTimers() + tTimers[0].mode = { type: 'freeRun', startTime: 5000, pauseTime: null } + const mockPlayoutModel = createMockPlayoutModel(tTimers) + const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1) + + const result = timer.restart() + + expect(result).toBe(false) + 
expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled()
+		})
+
+		it('should restart a timeOfDay timer with valid targetRaw', () => {
+			const tTimers = createEmptyTTimers()
+			tTimers[0].mode = {
+				type: 'timeOfDay',
+				targetTime: 5000, // old target time
+				targetRaw: '15:30',
+				stopAtZero: true,
+			}
+			const mockPlayoutModel = createMockPlayoutModel(tTimers)
+			const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1)
+
+			const result = timer.restart()
+
+			expect(result).toBe(true)
+			expect(mockPlayoutModel.updateTTimer).toHaveBeenCalledWith({
+				index: 1,
+				label: 'Timer 1',
+				mode: {
+					type: 'timeOfDay',
+					targetTime: expect.any(Number), // new target time
+					targetRaw: '15:30',
+					stopAtZero: true,
+				},
+			})
+		})
+
+		it('should return false for timeOfDay timer with invalid targetRaw', () => {
+			const tTimers = createEmptyTTimers()
+			tTimers[0].mode = {
+				type: 'timeOfDay',
+				targetTime: 5000,
+				targetRaw: 'invalid-time-string',
+				stopAtZero: true,
+			}
+			const mockPlayoutModel = createMockPlayoutModel(tTimers)
+			const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1)
+
+			const result = timer.restart()
+
+			expect(result).toBe(false)
+			expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled()
+		})
+
+		it('should return false for timer with no mode', () => {
+			const tTimers = createEmptyTTimers()
+			const mockPlayoutModel = createMockPlayoutModel(tTimers)
+			const timer = new PlaylistTTimerImpl(mockPlayoutModel, 1)
+
+			const result = timer.restart()
+
+			expect(result).toBe(false)
+			expect(mockPlayoutModel.updateTTimer).not.toHaveBeenCalled()
+		})
+	})
+
+	describe('constructor validation', () => {
+		it('should throw for invalid index', () => {
+			const mockPlayoutModel = createMockPlayoutModel(createEmptyTTimers())
+
+			expect(() => new PlaylistTTimerImpl(mockPlayoutModel, 0 as RundownTTimerIndex)).toThrow(
+				'T-timer index out of range: 0'
+			)
+			expect(() => new PlaylistTTimerImpl(mockPlayoutModel, 4 as RundownTTimerIndex)).toThrow(
+				'T-timer index out of range: 4'
+			)
+		})
+	})
+})
diff --git a/packages/job-worker/src/blueprints/context/watchedPackages.ts b/packages/job-worker/src/blueprints/context/watchedPackages.ts
index 29d1e8901f..7c690f45c0 100644
--- a/packages/job-worker/src/blueprints/context/watchedPackages.ts
+++ b/packages/job-worker/src/blueprints/context/watchedPackages.ts
@@ -1,29 +1,31 @@
-import {
-	ExpectedPackageDB,
-	ExpectedPackageDBBase,
-	ExpectedPackageFromRundown,
-} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos'
 import { JobContext } from '../../jobs/index.js'
-import { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { BucketId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { Filter as FilterQuery } from 'mongodb'
 import { PackageInfo } from '@sofie-automation/blueprints-integration'
 import { unprotectObjectArray } from '@sofie-automation/corelib/dist/protectedString'
-import { ExpectedPackageForIngestModel, IngestModelReadonly } from '../../ingest/model/IngestModel.js'
+import { IngestModelReadonly } from '../../ingest/model/IngestModel.js'
 import { ReadonlyDeep } from 'type-fest'
+import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js'
+import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
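Editor's aside (not part of the patch): the helper below consumes IngestExpectedPackage records in place of the old ExpectedPackageDB rows. Judging purely by the fields used in this file — packageId, package, source.blueprintPackageId, source.listenToPackageInfoUpdates — its shape is roughly the following; this is a hypothetical reconstruction, and the real definition lives in ingest/model/IngestExpectedPackage.js:

	// Hypothetical sketch; ExpectedPackage comes from '@sofie-automation/blueprints-integration'
	interface IngestExpectedPackageSketch<TSource extends ExpectedPackageIngestSource = ExpectedPackageIngestSource> {
		packageId: ExpectedPackageId // _id of the owning ExpectedPackageDB document
		package: ReadonlyDeep<ExpectedPackage.Any> // the blueprint-supplied package payload
		source: TSource // the single ingest source this record represents
	}

One ExpectedPackageDB document can carry several ingest sources, so create() below fans each document out into one record per source and the constructor groups those records by packageId.

/**
 * This is a helper class to simplify exposing packageInfo to various places in the blueprints
 */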
export class WatchedPackagesHelper {
-	private readonly packages = new Map<ExpectedPackageId, ReadonlyDeep<ExpectedPackageDB>>()
+	private readonly packages = new Map<
+		ExpectedPackageId,
+		ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>[]
+	>()

 	private constructor(
-		packages: ReadonlyDeep<ExpectedPackageDB[]>,
+		packages: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>[]>,
 		private readonly packageInfos: ReadonlyDeep<PackageInfoDB[]>
 	) {
 		for (const pkg of packages) {
-			this.packages.set(pkg._id, pkg)
+			const arr = this.packages.get(pkg.packageId) || []
+			arr.push(pkg)
+			this.packages.set(pkg.packageId, arr)
 		}
 	}

@@ -39,21 +41,41 @@ export class WatchedPackagesHelper {
 	 * @param studioId The studio this is for
 	 * @param filter A mongo query to specify the packages that should be included
 	 */
-	static async create(
+	static async create(
 		context: JobContext,
-		filter: FilterQuery<ReadonlyDeep<ExpectedPackageDBBase>>
+		rundownId: RundownId | null,
+		bucketId: BucketId | null,
+		filterIngestSources: FilterQuery<ExpectedPackageIngestSource>
 	): Promise<WatchedPackagesHelper> {
 		// Load all the packages and the infos that are watched
 		const watchedPackages = await context.directCollections.ExpectedPackages.findFetch({
-			...filter,
 			studioId: context.studioId,
-		} as any) // TODO: don't use any here
+			rundownId: rundownId,
+			bucketId: bucketId,
+			ingestSources: {
+				$elemMatch: filterIngestSources,
+			},
+		})
 		const watchedPackageInfos = await context.directCollections.PackageInfos.findFetch({
 			studioId: context.studioId,
 			packageId: { $in: watchedPackages.map((p) => p._id) },
 		})

-		return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos)
+		const watchedIngestPackages: IngestExpectedPackage[] = watchedPackages.flatMap(
+			(expectedPackage) => {
+				// Split into a package per source
+				return expectedPackage.ingestSources.map(
+					(source) =>
+						({
+							packageId: expectedPackage._id,
+							package: expectedPackage.package,
+							source: source,
+						}) satisfies IngestExpectedPackage
+				)
+			}
+		)
+
+		return new WatchedPackagesHelper(watchedIngestPackages, watchedPackageInfos)
 	}

 	/**
@@ -65,7 +87,7 @@ export class WatchedPackagesHelper {
 		context: JobContext,
 		ingestModel: IngestModelReadonly
 	): Promise<WatchedPackagesHelper> {
-		const packages: ReadonlyDeep<ExpectedPackageForIngestModel>[] = []
+		const packages: ReadonlyDeep<IngestExpectedPackage>[] = []

 		packages.push(...ingestModel.expectedPackagesForRundownBaseline)

@@ -77,7 +99,7 @@ export class WatchedPackagesHelper {
 		return this.#createFromPackages(
 			context,
-			packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates)
+			packages.filter((pkg) => !!pkg.source.listenToPackageInfoUpdates)
 		)
 	}

@@ -92,7 +114,7 @@ export class WatchedPackagesHelper {
 		ingestModel: IngestModelReadonly,
 		segmentExternalIds: string[]
 	): Promise<WatchedPackagesHelper> {
-		const packages: ReadonlyDeep<ExpectedPackageForIngestModel>[] = []
+		const packages: ReadonlyDeep<IngestExpectedPackage>[] = []

 		for (const externalId of segmentExternalIds) {
 			const segment = ingestModel.getSegmentByExternalId(externalId)
@@ -105,17 +127,17 @@ export class WatchedPackagesHelper {
 		return this.#createFromPackages(
 			context,
-			packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates)
+			packages.filter((pkg) => !!pkg.source.listenToPackageInfoUpdates)
 		)
 	}

-	static async #createFromPackages(context: JobContext, packages: ReadonlyDeep<ExpectedPackageForIngestModel>[]) {
+	static async #createFromPackages(context: JobContext, packages: ReadonlyDeep<IngestExpectedPackage>[]) {
 		// Load all the packages and the infos that are watched
 		const watchedPackageInfos =
 			packages.length > 0
 				? await context.directCollections.PackageInfos.findFetch({
 						studioId: context.studio._id,
-						packageId: { $in: packages.map((p) => p._id) },
+						packageId: { $in: packages.map((p) => p.packageId) },
 					})
 				: []

@@ -124,30 +146,41 @@ export class WatchedPackagesHelper {

 	/**
 	 * Create a new helper with a subset of the data in the current helper.
-	 * This is useful so that all the data for a rundown can be loaded at the start of an ingest operation, and then subsets can be taken for particular blueprint methods without needing to do more db operations.
+	 * This is useful so that all the data for a rundown can be loaded at the start of an ingest operation,
+	 * and then subsets can be taken for particular blueprint methods without needing to do more db operations.
 	 * @param func A filter to check if each package should be included
 	 */
-	filter(_context: JobContext, func: (pkg: ReadonlyDeep<ExpectedPackageDB>) => boolean): WatchedPackagesHelper {
-		const watchedPackages: ReadonlyDeep<ExpectedPackageDB>[] = []
-		for (const pkg of this.packages.values()) {
-			if (func(pkg)) watchedPackages.push(pkg)
+	filter(
+		_context: JobContext,
+		func: (pkg: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>) => boolean
+	): WatchedPackagesHelper {
+		const watchedPackages: ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSource>>[] = []
+		for (const packages of this.packages.values()) {
+			for (const pkg of packages) {
+				if (func(pkg)) watchedPackages.push(pkg)
+			}
 		}

-		const newPackageIds = new Set(watchedPackages.map((p) => p._id))
+		const newPackageIds = new Set(watchedPackages.map((p) => p.packageId))
 		const watchedPackageInfos = this.packageInfos.filter((info) => newPackageIds.has(info.packageId))

 		return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos)
 	}

-	getPackage(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageDB> | undefined {
-		return this.packages.get(packageId)
+	hasPackage(packageId: ExpectedPackageId): boolean {
+		return this.packages.has(packageId)
 	}

-	getPackageInfo(packageId: string): Readonly<Array<PackageInfo.Any>> {
-		for (const pkg of this.packages.values()) {
-			if (pkg.blueprintPackageId === packageId) {
-				const info = this.packageInfos.filter((p) => p.packageId === pkg._id)
-				return unprotectObjectArray(info)
+	getPackageInfo(blueprintPackageId: string): Readonly<Array<PackageInfo.Any>> {
+		// Perhaps this should do some scoped source checks, but this should not be necessary.
+		// The caller should be ensuring that this helper has been filtered to only contain relevant packages
+		for (const packages of this.packages.values()) {
+			for (const pkg of packages) {
+				// Note: This finds the first package with the same blueprintPackageId. There could be multiple if the blueprints don't respect the uniqueness rules.
+ if (pkg.source.blueprintPackageId === blueprintPackageId) { + const info = this.packageInfos.filter((p) => p.packageId === pkg.packageId) + return unprotectObjectArray(info) + } } } diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 5c17bb1a3c..e3d6075fea 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -42,7 +42,7 @@ import { interpollateTranslation, wrapTranslatableMessageFromBlueprints, } from '@sofie-automation/corelib/dist/TranslatableMessage' -import { setDefaultIdOnExpectedPackages } from '../ingest/expectedPackages.js' +import { sanitiseExpectedPackages } from '../ingest/expectedPackages.js' import { logger } from '../logging.js' import { validateTimeline } from 'superfly-timeline' import { ReadonlyDeep } from 'type-fest' @@ -137,8 +137,8 @@ export function postProcessPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -267,8 +267,8 @@ export function postProcessAdLibPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -304,8 +304,8 @@ export function postProcessGlobalAdLibActions( `${rundownId}_${blueprintId}_global_adlib_action_${action.externalId}` ) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return literal({ ...action, @@ -345,8 +345,8 @@ export function postProcessAdLibActions( `${rundownId}_${blueprintId}_${partId}_adlib_action_${action.externalId}` ) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return literal({ ...action, @@ -428,8 +428,8 @@ export function postProcessGlobalPieces( ) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) return piece }) @@ -504,8 +504,8 @@ export function postProcessBucketAdLib( name: name || itemOrig.name, timelineObjectsString: EmptyPieceTimelineObjectsBlob, } - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(piece.expectedPackages) + // Fill in contentVersionHash of expectedPackages + sanitiseExpectedPackages(piece.expectedPackages) const timelineObjects = postProcessTimelineObjects(piece._id, blueprintId, itemOrig.content.timelineObjects) piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) @@ -553,8 +553,8 @@ export function postProcessBucketAction( ...processAdLibActionITranslatableMessages(itemOrig, blueprintId, rank, label), } - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(action.expectedPackages) + // Fill in contentVersionHash of 
expectedPackages + sanitiseExpectedPackages(action.expectedPackages) return action } diff --git a/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts b/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts index 7631c647c5..a39d82f7cc 100644 --- a/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts +++ b/packages/job-worker/src/events/__tests__/externalMessageQueue.test.ts @@ -56,6 +56,11 @@ describe('Test external message queue static methods', () => { type: PlaylistTimingType.None, }, rundownIdsInOrder: [protectString('rundown_1')], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], }) await context.mockCollections.Rundowns.insertOne({ _id: protectString('rundown_1'), @@ -201,6 +206,11 @@ describe('Test sending messages to mocked endpoints', () => { type: PlaylistTimingType.None, }, rundownIdsInOrder: [protectString('rundown_1')], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], }) const rundown = (await context.mockCollections.Rundowns.findOne(rundownId)) as DBRundown diff --git a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts index 8b5cb99a12..d85cbbcb7c 100644 --- a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts +++ b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts @@ -6,12 +6,12 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { defaultPart, defaultPiece, defaultAdLibPiece } from '../../__mocks__/defaultCollectionObjects.js' import { LAYER_IDS } from '../../__mocks__/presetCollections.js' import { ExpectedPackage, PieceLifespan, VTContent } from '@sofie-automation/blueprints-integration' -import { updateExpectedPackagesForPartModel } from '../expectedPackages.js' +import { updateExpectedMediaAndPlayoutItemsForPartModel } from '../expectedPackages.js' import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js' import { ReadonlyDeep } from 'type-fest' import { IngestPartModel } from '../model/IngestPartModel.js' -describe('Expected Media Items', () => { +describe('Expected Playout Items', () => { let context: MockJobContext beforeAll(async () => { context = setupDefaultJobEnvironment() @@ -111,9 +111,8 @@ describe('Expected Media Items', () => { return { part, pieces, adLibPieces } } - test('Generates ExpectedPackages for a Part', async () => { + test('Generates for a Part', async () => { const setExpectedPlayoutItems = jest.fn() - const setExpectedPackages = jest.fn() const { part, pieces, adLibPieces } = getMockPartContent() @@ -126,16 +125,12 @@ describe('Expected Media Items', () => { expectedPackages: [], setExpectedPlayoutItems, - setExpectedPackages, setInvalid: function (_invalid: boolean): void { throw new Error('Function not implemented.') }, } - updateExpectedPackagesForPartModel(context, partModel) - - expect(setExpectedPackages).toHaveBeenCalledTimes(1) - expect(setExpectedPackages.mock.calls[0][0]).toHaveLength(4) + updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel) expect(setExpectedPlayoutItems).toHaveBeenCalledTimes(1) expect(setExpectedPlayoutItems).toHaveBeenCalledWith([]) diff --git a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts index 
b663ad3501..47ddfed664 100644 --- a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts +++ b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts @@ -315,6 +315,11 @@ describe('SyncChangesToPartInstancesWorker', () => { modified: 0, timing: { type: PlaylistTimingType.None }, rundownIdsInOrder: [], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], } const segmentModel = new PlayoutSegmentModelImpl(segment, [part0]) diff --git a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts index b92cbe7766..91df4cc24e 100644 --- a/packages/job-worker/src/ingest/__tests__/updateNext.test.ts +++ b/packages/job-worker/src/ingest/__tests__/updateNext.test.ts @@ -34,6 +34,11 @@ async function createMockRO(context: MockJobContext): Promise { }, rundownIdsInOrder: [rundownId], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], }) await context.mockCollections.Rundowns.insertOne({ diff --git a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts index 65c5973dcc..8217bd46b0 100644 --- a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts +++ b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts @@ -10,14 +10,12 @@ import { } from '@sofie-automation/corelib/dist/worker/ingest' import { cleanUpExpectedPackagesForBucketAdLibs, - cleanUpExpectedPackagesForBucketAdLibsActions, updateExpectedPackagesForBucketAdLibPiece, updateExpectedPackagesForBucketAdLibAction, } from '../expectedPackages.js' import { omit } from '@sofie-automation/corelib/dist/lib' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { MongoQuery } from '../../db/index.js' export async function handleBucketRemoveAdlibPiece( @@ -34,7 +32,7 @@ export async function handleBucketRemoveAdlibPiece( await Promise.all([ context.directCollections.BucketAdLibPieces.remove({ _id: { $in: idsToUpdate } }), - cleanUpExpectedPackagesForBucketAdLibs(context, idsToUpdate), + cleanUpExpectedPackagesForBucketAdLibs(context, piece.bucketId, idsToUpdate), ]) } @@ -52,7 +50,7 @@ export async function handleBucketRemoveAdlibAction( await Promise.all([ context.directCollections.BucketAdLibActions.remove({ _id: { $in: idsToUpdate } }), - cleanUpExpectedPackagesForBucketAdLibsActions(context, idsToUpdate), + cleanUpExpectedPackagesForBucketAdLibs(context, action.bucketId, idsToUpdate), ]) } @@ -64,12 +62,6 @@ export async function handleBucketEmpty(context: JobContext, data: BucketEmptyPr context.directCollections.BucketAdLibActions.remove({ bucketId: id, studioId: context.studioId }), context.directCollections.ExpectedPackages.remove({ studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, - bucketId: id, - }), - context.directCollections.ExpectedPackages.remove({ - studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, bucketId: id, }), ]) diff --git a/packages/job-worker/src/ingest/bucket/import.ts b/packages/job-worker/src/ingest/bucket/import.ts index 569bd05e78..8d6ca518ca 100644 --- a/packages/job-worker/src/ingest/bucket/import.ts +++ 
b/packages/job-worker/src/ingest/bucket/import.ts @@ -12,7 +12,6 @@ import { getSystemVersion } from '../../lib/index.js' import { BucketItemImportProps, BucketItemRegenerateProps } from '@sofie-automation/corelib/dist/worker/ingest' import { cleanUpExpectedPackagesForBucketAdLibs, - cleanUpExpectedPackagesForBucketAdLibsActions, updateExpectedPackagesForBucketAdLibPiece, updateExpectedPackagesForBucketAdLibAction, } from '../expectedPackages.js' @@ -155,7 +154,13 @@ async function regenerateBucketItemFromIngestInfo( if (!showStyleCompound) throw new Error(`Unable to create a ShowStyleCompound for ${showStyleBase._id}, ${showStyleVariant._id} `) - const rawAdlib = await generateBucketAdlibForVariant(context, blueprint, showStyleCompound, ingestInfo.payload) + const rawAdlib = await generateBucketAdlibForVariant( + context, + blueprint, + showStyleCompound, + bucketId, + ingestInfo.payload + ) if (rawAdlib) { const importVersions: RundownImportVersions = { @@ -229,7 +234,7 @@ async function regenerateBucketItemFromIngestInfo( const adlibIdsToRemoveArray = Array.from(adlibIdsToRemove) ps.push( - cleanUpExpectedPackagesForBucketAdLibs(context, adlibIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibs(context, bucketId, adlibIdsToRemoveArray), context.directCollections.BucketAdLibPieces.remove({ _id: { $in: adlibIdsToRemoveArray } }) ) } @@ -237,7 +242,7 @@ async function regenerateBucketItemFromIngestInfo( const actionIdsToRemoveArray = Array.from(actionIdsToRemove) ps.push( - cleanUpExpectedPackagesForBucketAdLibsActions(context, actionIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibs(context, bucketId, actionIdsToRemoveArray), context.directCollections.BucketAdLibActions.remove({ _id: { $in: actionIdsToRemoveArray } }) ) } @@ -248,17 +253,18 @@ async function generateBucketAdlibForVariant( context: JobContext, blueprint: ReadonlyDeep, showStyleCompound: ReadonlyDeep, + bucketId: BucketId, // pieceId: BucketAdLibId | BucketAdLibActionId, payload: IngestAdlib ): Promise { if (!blueprint.blueprint.getAdlibItem) return null - const watchedPackages = await WatchedPackagesHelper.create(context, { - // We don't know what the `pieceId` will be, but we do know the `externalId` - pieceExternalId: payload.externalId, + const watchedPackages = await WatchedPackagesHelper.create(context, null, bucketId, { fromPieceType: { $in: [ExpectedPackageDBType.BUCKET_ADLIB, ExpectedPackageDBType.BUCKET_ADLIB_ACTION], }, + // We don't know what the `pieceId` will be, but we do know the `externalId` + pieceExternalId: payload.externalId, }) const contextForVariant = new ShowStyleUserContext( diff --git a/packages/job-worker/src/ingest/commit.ts b/packages/job-worker/src/ingest/commit.ts index c017861d36..47e26f850c 100644 --- a/packages/job-worker/src/ingest/commit.ts +++ b/packages/job-worker/src/ingest/commit.ts @@ -19,7 +19,7 @@ import { removeRundownFromDb, } from '../rundownPlaylists.js' import { ReadonlyDeep } from 'type-fest' -import { IngestModel, IngestModelReadonly } from './model/IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js' import { JobContext } from '../jobs/index.js' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' @@ -40,7 +40,6 @@ import { PlayoutRundownModelImpl } from '../playout/model/implementation/Playout import { PlayoutSegmentModelImpl } from 
'../playout/model/implementation/PlayoutSegmentModelImpl.js' import { createPlayoutModelFromIngestModel } from '../playout/model/implementation/LoadPlayoutModel.js' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { DatabasePersistedModel } from '../modelBase.js' import { updateSegmentIdsForAdlibbedPartInstances } from './commit/updateSegmentIdsForAdlibbedPartInstances.js' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { AnyBulkWriteOperation } from 'mongodb' @@ -64,7 +63,7 @@ interface PlaylistIdPair { */ export async function CommitIngestOperation( context: JobContext, - ingestModel: IngestModel & DatabasePersistedModel, + ingestModel: IngestModel & IngestDatabasePersistedModel, beforeRundown: ReadonlyDeep | undefined, beforePartMap: BeforeIngestOperationPartMap, data: ReadonlyDeep @@ -223,7 +222,7 @@ export async function CommitIngestOperation( ) // Start the save - const pSaveIngest = ingestModel.saveAllToDatabase() + const pSaveIngest = ingestModel.saveAllToDatabase(playlistLock) pSaveIngest.catch(() => null) // Ensure promise isn't reported as unhandled await validateAdlibTestingSegment(context, playoutModel) diff --git a/packages/job-worker/src/ingest/expectedPackages.ts b/packages/job-worker/src/ingest/expectedPackages.ts index b49d9e993e..4a2479d23f 100644 --- a/packages/job-worker/src/ingest/expectedPackages.ts +++ b/packages/job-worker/src/ingest/expectedPackages.ts @@ -1,36 +1,14 @@ -import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' -import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { ExpectedPackageDBType, - ExpectedPackageDBFromPiece, - ExpectedPackageDBFromBaselineAdLibPiece, - ExpectedPackageDBFromAdLibAction, - ExpectedPackageDBFromBaselineAdLibAction, - ExpectedPackageDBFromBucketAdLib, - ExpectedPackageDBFromBucketAdLibAction, - ExpectedPackageDBBase, - ExpectedPackageDBFromRundownBaselineObjects, - ExpectedPackageDBFromStudioBaselineObjects, - getContentVersionHash, + ExpectedPackageDB, + ExpectedPackageIngestSource, getExpectedPackageId, - ExpectedPackageFromRundown, + ExpectedPackageIngestSourceBucketAdlibAction, + ExpectedPackageIngestSourceBucketAdlibPiece, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { - SegmentId, - RundownId, - AdLibActionId, - PieceId, - RundownBaselineAdLibActionId, - BucketAdLibActionId, - BucketAdLibId, - StudioId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' -import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' -import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' -import { saveIntoDb } from '../db/changes.js' +import { BucketId, BucketAdLibId, BucketAdLibActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PlayoutModel } from '../playout/model/PlayoutModel.js' import { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel.js' import { ReadonlyDeep } from 'type-fest' @@ -41,336 +19,247 @@ import { updateExpectedPlayoutItemsForRundownBaseline, } from './expectedPlayoutItems.js' import { JobContext, JobStudio } from '../jobs/index.js' -import { 
ExpectedPackageForIngestModelBaseline, IngestModel } from './model/IngestModel.js' +import { IngestModel } from './model/IngestModel.js' import { IngestPartModel } from './model/IngestPartModel.js' -import { clone } from '@sofie-automation/corelib/dist/lib' +import { hashObj } from '@sofie-automation/corelib/dist/lib' +import { AnyBulkWriteOperation } from 'mongodb' -export function updateExpectedPackagesForPartModel(context: JobContext, part: IngestPartModel): void { +export function updateExpectedMediaAndPlayoutItemsForPartModel(context: JobContext, part: IngestPartModel): void { updateExpectedPlayoutItemsForPartModel(context, part) - - const expectedPackages: ExpectedPackageFromRundown[] = [ - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.pieces, - ExpectedPackageDBType.PIECE - ), - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibPieces, - ExpectedPackageDBType.ADLIB_PIECE - ), - ...generateExpectedPackagesForAdlibAction( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibActions - ), - ] - - part.setExpectedPackages(expectedPackages) } -export async function updateExpectedPackagesForRundownBaseline( +export async function updateExpectedMediaAndPlayoutItemsForRundownBaseline( context: JobContext, ingestModel: IngestModel, - baseline: BlueprintResultBaseline | undefined, - forceBaseline = false + baseline: BlueprintResultBaseline | undefined ): Promise { await updateExpectedPlayoutItemsForRundownBaseline(context, ingestModel, baseline) +} - const expectedPackages: ExpectedPackageForIngestModelBaseline[] = [] - - const preserveTypesDuringSave = new Set() +function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlib: BucketAdLib) { + const packages: ExpectedPackageDB[] = [] - // Only regenerate the baseline types if they are already loaded into memory - // If the data isn't already loaded, then we haven't made any changes to the baseline adlibs - // This means we can skip regenerating them as it is guaranteed there will be no changes - const baselineAdlibPieceCache = forceBaseline - ? await ingestModel.rundownBaselineAdLibPieces.get() - : ingestModel.rundownBaselineAdLibPieces.getIfLoaded() - if (baselineAdlibPieceCache) { - expectedPackages.push( - ...generateExpectedPackagesForBaselineAdlibPiece( - context.studio, - ingestModel.rundownId, - baselineAdlibPieceCache - ) - ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_PIECE) - } - const baselineAdlibActionCache = forceBaseline - ? 
await ingestModel.rundownBaselineAdLibActions.get()
-		: ingestModel.rundownBaselineAdLibActions.getIfLoaded()
-	if (baselineAdlibActionCache) {
-		expectedPackages.push(
-			...generateExpectedPackagesForBaselineAdlibAction(
-				context.studio,
-				ingestModel.rundownId,
-				baselineAdlibActionCache
+	if (adlib.expectedPackages) {
+		packages.push(
+			...generateBucketExpectedPackages(
+				studio,
+				adlib.bucketId,
+				{
+					fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB,
+					pieceId: adlib._id,
+					pieceExternalId: adlib.externalId,
+				},
+				adlib.expectedPackages
 			)
 		)
-	} else {
-		// We haven't regenerated anything, so preserve the values in the save
-		preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_ACTION)
-	}
-
-	if (baseline) {
-		// Fill in ids of unnamed expectedPackages
-		setDefaultIdOnExpectedPackages(baseline.expectedPackages)
-
-		const bases = generateExpectedPackageBases(
-			context.studio,
-			ingestModel.rundownId,
-			baseline.expectedPackages ?? []
-		)
-
-		expectedPackages.push(
-			...bases.map((item): ExpectedPackageDBFromRundownBaselineObjects => {
-				return {
-					...item,
-					fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS,
-					rundownId: ingestModel.rundownId,
-					pieceId: null,
-				}
-			})
-		)
-	} else {
-		// We haven't regenerated anything, so preserve the values in the save
-		preserveTypesDuringSave.add(ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS)
-	}
-
-	// Add expected packages for global pieces
-	for (const piece of ingestModel.getGlobalPieces()) {
-		if (piece.expectedPackages) {
-			const bases = generateExpectedPackageBases(context.studio, piece._id, piece.expectedPackages)
-			for (const base of bases) {
-				expectedPackages.push({
-					...base,
-					rundownId: ingestModel.rundownId,
-					pieceId: piece._id,
-					fromPieceType: ExpectedPackageDBType.BASELINE_PIECE,
-				})
-			}
-		}
 	}

-	// Preserve anything existing
-	for (const expectedPackage of ingestModel.expectedPackagesForRundownBaseline) {
-		if (preserveTypesDuringSave.has(expectedPackage.fromPieceType)) {
-			expectedPackages.push(clone(expectedPackage))
-		}
-	}
-
-	ingestModel.setExpectedPackagesForRundownBaseline(expectedPackages)
-}
-
-function generateExpectedPackagesForPiece(
-	studio: ReadonlyDeep,
-	rundownId: RundownId,
-	segmentId: SegmentId,
-	pieces: ReadonlyDeep[],
-	type: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE
-) {
-	const packages: ExpectedPackageDBFromPiece[] = []
-	for (const piece of pieces) {
-		const partId = 'startPartId' in piece ? piece.startPartId : piece.partId
-		if (piece.expectedPackages && partId) {
-			const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					rundownId,
-					segmentId,
-					partId,
-					pieceId: piece._id,
-					fromPieceType: type,
-				})
-			}
-		}
-	}
-	return packages
-}
-function generateExpectedPackagesForBaselineAdlibPiece(
-	studio: ReadonlyDeep,
-	rundownId: RundownId,
-	pieces: ReadonlyDeep
-) {
-	const packages: ExpectedPackageDBFromBaselineAdLibPiece[] = []
-	for (const piece of pieces) {
-		if (piece.expectedPackages) {
-			const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					rundownId,
-					pieceId: piece._id,
-					fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE,
-				})
-			}
-		}
-	}
-	return packages
-}
-function generateExpectedPackagesForAdlibAction(
-	studio: ReadonlyDeep,
-	rundownId: RundownId,
-	segmentId: SegmentId,
-	actions: ReadonlyDeep
-) {
-	const packages: ExpectedPackageDBFromAdLibAction[] = []
-	for (const action of actions) {
-		if (action.expectedPackages) {
-			const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					rundownId,
-					segmentId,
-					partId: action.partId,
-					pieceId: action._id,
-					fromPieceType: ExpectedPackageDBType.ADLIB_ACTION,
-				})
-			}
-		}
-	}
-	return packages
-}
-function generateExpectedPackagesForBaselineAdlibAction(
-	studio: ReadonlyDeep,
-	rundownId: RundownId,
-	actions: ReadonlyDeep
-) {
-	const packages: ExpectedPackageDBFromBaselineAdLibAction[] = []
-	for (const action of actions) {
-		if (action.expectedPackages) {
-			const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					rundownId,
-					pieceId: action._id,
-					fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
-				})
-			}
-		}
-	}
 	return packages
 }
-function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlibs: BucketAdLib[]) {
-	const packages: ExpectedPackageDBFromBucketAdLib[] = []
-	for (const adlib of adlibs) {
-		if (adlib.expectedPackages) {
-			const bases = generateExpectedPackageBases(studio, adlib._id, adlib.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					bucketId: adlib.bucketId,
-					pieceId: adlib._id,
-					pieceExternalId: adlib.externalId,
-					fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB,
-				})
-			}
-		}
-	}
-	return packages
-}
-function generateExpectedPackagesForBucketAdlibAction(
-	studio: ReadonlyDeep,
-	adlibActions: BucketAdLibAction[]
-) {
-	const packages: ExpectedPackageDBFromBucketAdLibAction[] = []
-	for (const action of adlibActions) {
-		if (action.expectedPackages) {
-			const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages)
-			for (const base of bases) {
-				packages.push({
-					...base,
-					bucketId: action.bucketId,
+function generateExpectedPackagesForBucketAdlibAction(studio: ReadonlyDeep, action: BucketAdLibAction) {
+	const packages: ExpectedPackageDB[] = []
+
+	if (action.expectedPackages) {
+		packages.push(
+			...generateBucketExpectedPackages(
+				studio,
+				action.bucketId,
+				{
+					fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
 					pieceId: action._id,
 					pieceExternalId: action.externalId,
-					fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
-				})
-			}
-		}
+				},
+				action.expectedPackages
+			)
+		)
 	}
+
 	return packages
 }
-function generateExpectedPackageBases(
+function generateBucketExpectedPackages(
 	studio: ReadonlyDeep,
-	ownerId:
-		| PieceId
-		| AdLibActionId
-		| RundownBaselineAdLibActionId
-		| BucketAdLibId
-		| BucketAdLibActionId
-		| RundownId
-		| StudioId,
+	bucketId: BucketId,
+	source: Omit,
 	expectedPackages: ReadonlyDeep
-) {
-	const bases: Omit[] = []
+): ExpectedPackageDB[] {
+	const bases: ExpectedPackageDB[] = []
 	for (let i = 0; i < expectedPackages.length; i++) {
 		const expectedPackage = expectedPackages[i]
-		const id = expectedPackage._id || '__unnamed' + i
+
+		const fullPackage: ReadonlyDeep = {
+			...expectedPackage,
+			_id: expectedPackage._id || '__unnamed' + i,
+		}
 		bases.push({
-			...clone(expectedPackage),
-			_id: getExpectedPackageId(ownerId, id),
-			blueprintPackageId: id,
-			contentVersionHash: getContentVersionHash(expectedPackage),
+			_id: getExpectedPackageId(bucketId, fullPackage),
+			package: fullPackage,
 			studioId: studio._id,
-			created: Date.now(),
+			rundownId: null,
+			bucketId: bucketId,
+			created: Date.now(), // This will be preserved during the save if needed
+			ingestSources: [
+				{
+					...(source as any), // Because this is a generic, this spread doesn't work
+					blueprintPackageId: expectedPackage._id,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				},
+			],
+			playoutSources: {
+				// These don't belong to a rundown, so can't be referenced by playout
+				pieceInstanceIds: [],
+			},
 		})
 	}
+
 	return bases
 }
+async function writeUpdatedExpectedPackages(
+	context: JobContext,
+	bucketId: BucketId,
+	documentsToSave: ExpectedPackageDB[],
+	matchSource: Partial
+): Promise<void> {
+	const writeOps: AnyBulkWriteOperation[] = []
+
+	const documentIdsToSave = documentsToSave.map((doc) => doc._id)
+
+	// Find which documents already exist in the database
+	// It would be nice to avoid this, but that would make the update operation incredibly complex
+	// There is no risk of race conditions, as bucket packages are only modified in the ingest job worker
+	const existingDocIds = new Set(
+		(
+			await context.directCollections.ExpectedPackages.findFetch(
+				{
+					_id: { $in: documentIdsToSave },
+					studioId: context.studioId,
+					bucketId: bucketId,
+				},
+				{
+					projection: {
+						_id: 1,
+					},
+				}
+			)
+		).map((doc) => doc._id)
+	)
+
+	for (const doc of documentsToSave) {
+		if (existingDocIds.has(doc._id)) {
+			// Document already exists, perform an update to merge the source into the existing document
+			writeOps.push({
+				updateOne: {
+					filter: {
+						_id: doc._id,
+						ingestSources: {
+							// This is pretty messy, but we need to make sure that we don't add the same source twice
+							$not: {
+								$elemMatch: matchSource,
+							},
+						},
+					},
+					update: {
+						$addToSet: {
+							ingestSources: doc.ingestSources[0],
+						},
+					},
+				},
+			})
+		} else {
+			// Perform a simple insert
+			writeOps.push({
+				insertOne: {
+					document: doc,
+				},
+			})
+		}
+	}
+
+	// Remove any old references from this source
+	writeOps.push({
+		updateMany: {
+			filter: {
+				studioId: context.studioId,
+				bucketId: bucketId,
+				_id: { $nin: documentIdsToSave },
+			},
+			update: {
+				$pull: {
+					ingestSources: matchSource,
+				},
+			},
+		},
+	})
+
+	await context.directCollections.ExpectedPackages.bulkWrite(writeOps)
+
+	// Check for any packages that no longer have any sources
+	await cleanUpUnusedPackagesInBucket(context, bucketId)
+}
+
 export async function updateExpectedPackagesForBucketAdLibPiece(
 	context: JobContext,
 	adlib: BucketAdLib
 ): Promise<void> {
-	const packages = generateExpectedPackagesForBucketAdlib(context.studio, [adlib])
+	const documentsToSave = generateExpectedPackagesForBucketAdlib(context.studio, adlib)

-	await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: adlib._id }, packages)
+	await writeUpdatedExpectedPackages(context, adlib.bucketId, documentsToSave, {
+		fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB,
+		pieceId: adlib._id,
+	})
 }

 export async function updateExpectedPackagesForBucketAdLibAction(
 	context: JobContext,
 	action: BucketAdLibAction
 ): Promise<void> {
-	const packages = generateExpectedPackagesForBucketAdlibAction(context.studio, [action])
+	const documentsToSave = generateExpectedPackagesForBucketAdlibAction(context.studio, action)

-	await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: action._id }, packages)
+	await writeUpdatedExpectedPackages(context, action.bucketId, documentsToSave, {
+		fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION,
+		pieceId: action._id,
+	})
 }
+
 export async function cleanUpExpectedPackagesForBucketAdLibs(
 	context: JobContext,
-	adLibIds: BucketAdLibId[]
+	bucketId: BucketId,
+	adLibIds: Array<BucketAdLibId | BucketAdLibActionId>
 ): Promise<void> {
 	if (adLibIds.length > 0) {
-		await context.directCollections.ExpectedPackages.remove({
-			pieceId: {
-				$in: adLibIds,
+		// Remove the claim for the adlibs from any expected packages in the db
+		await context.directCollections.ExpectedPackages.update(
+			{
+				studioId: context.studioId,
+				bucketId: bucketId,
+				// Note: this could have the ingestSources match, but that feels excessive as the $pull performs the same check
 			},
-		})
+			{
+				$pull: {
+					ingestSources: {
+						fromPieceType: {
+							$in: [ExpectedPackageDBType.BUCKET_ADLIB, ExpectedPackageDBType.BUCKET_ADLIB_ACTION],
+						},
+						pieceId: { $in: adLibIds },
+					} as any, // This cast isn't nice, but is needed for some reason
+				},
+			}
+		)
+
+		// Remove any expected packages that now have no owners
+		await cleanUpUnusedPackagesInBucket(context, bucketId)
 	}
 }
-export async function cleanUpExpectedPackagesForBucketAdLibsActions(
-	context: JobContext,
-	adLibIds: BucketAdLibActionId[]
-): Promise<void> {
-	if (adLibIds.length > 0) {
-		await context.directCollections.ExpectedPackages.remove({
-			pieceId: {
-				$in: adLibIds,
-			},
-		})
-	}
+
+async function cleanUpUnusedPackagesInBucket(context: JobContext, bucketId: BucketId) {
+	await context.directCollections.ExpectedPackages.remove({
+		studioId: context.studioId,
+		bucketId: bucketId,
+		ingestSources: { $size: 0 },
+		// Future: these currently can't be referenced by playoutSources, but they could be in the future
+	})
 }

 export function updateBaselineExpectedPackagesOnStudio(
@@ -380,29 +269,21 @@ export function updateBaselineExpectedPackagesOnStudio(
 ): void {
 	updateBaselineExpectedPlayoutItemsOnStudio(context, playoutModel, baseline.expectedPlayoutItems ?? [])

-	// Fill in ids of unnamed expectedPackages
-	setDefaultIdOnExpectedPackages(baseline.expectedPackages)
-
-	const bases = generateExpectedPackageBases(context.studio, context.studio._id, baseline.expectedPackages ?? [])
-	playoutModel.setExpectedPackagesForStudioBaseline(
-		bases.map((item): ExpectedPackageDBFromStudioBaselineObjects => {
-			return {
-				...item,
-				fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS,
-				pieceId: null,
-			}
-		})
-	)
+	playoutModel.setExpectedPackagesForStudioBaseline(baseline.expectedPackages ?? [])
 }

-export function setDefaultIdOnExpectedPackages(expectedPackages: ExpectedPackage.Any[] | undefined): void {
-	// Fill in ids of unnamed expectedPackage
+export function sanitiseExpectedPackages(expectedPackages: ExpectedPackage.Any[] | undefined): void {
 	if (expectedPackages) {
-		for (let i = 0; i < expectedPackages.length; i++) {
-			const expectedPackage = expectedPackages[i]
-			if (!expectedPackage._id) {
-				expectedPackage._id = `__index${i}`
-			}
+		for (const expectedPackage of expectedPackages) {
+			expectedPackage.contentVersionHash = getContentVersionHash(expectedPackage)
 		}
 	}
 }
+
+function getContentVersionHash(expectedPackage: ReadonlyDeep<Omit<ExpectedPackage.Any, '_id'>>): string {
+	return hashObj({
+		content: expectedPackage.content,
+		version: expectedPackage.version,
+		// todo: should expectedPackage.sources.containerId be here as well?
+	})
+}
diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts
index 17b0bd4931..113f6dfb42 100644
--- a/packages/job-worker/src/ingest/generationRundown.ts
+++ b/packages/job-worker/src/ingest/generationRundown.ts
@@ -1,7 +1,14 @@
-import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import {
+	ExpectedPackageDBType,
+	ExpectedPackageIngestSourceBaselineAdlibAction,
+	ExpectedPackageIngestSourceBaselineAdlibPiece,
+	ExpectedPackageIngestSourceBaselineObjects,
+	ExpectedPackageIngestSourceBaselinePiece,
+	ExpectedPackageIngestSourceRundownBaseline,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { BlueprintId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes'
-import { serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
+import { Piece, serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { DBRundown, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown'
 import { literal } from '@sofie-automation/corelib/dist/lib'
 import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
@@ -21,13 +28,20 @@ import { extendIngestRundownCore, canRundownBeUpdated } from './lib.js'
 import { JobContext } from '../jobs/index.js'
 import { CommitIngestData } from './lock.js'
 import { SelectedShowStyleVariant, selectShowStyleVariant } from './selectShowStyleVariant.js'
-import { updateExpectedPackagesForRundownBaseline } from './expectedPackages.js'
+import { updateExpectedMediaAndPlayoutItemsForRundownBaseline } from './expectedPackages.js'
 import { ReadonlyDeep } from 'type-fest'
-import { BlueprintResultRundown, ExtendedIngestRundown } from '@sofie-automation/blueprints-integration'
+import {
+	BlueprintResultRundown,
+	ExpectedPackage,
+	ExtendedIngestRundown,
+} from '@sofie-automation/blueprints-integration'
 import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage'
 import { convertRundownToBlueprintSegmentRundown, translateUserEditsFromBlueprint } from '../blueprints/context/lib.js'
 import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment.js'
 import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache'
+import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
+import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction'
+import { ExpectedPackageCollector, IngestExpectedPackage } from './model/IngestExpectedPackage.js'

 export enum GenerateRundownMode {
 	Create = 'create',
@@ -207,8 +221,8 @@ export async function regenerateRundownAndBaselineFromIngestData(
 	const rundownBaselinePackages = allRundownWatchedPackages.filter(
 		context,
 		(pkg) =>
-			pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
-			pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
+			pkg.source.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
+			pkg.source.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
 	)

 	const blueprintContext = new GetRundownContext(
@@ -321,9 +335,59 @@ export async function regenerateRundownAndBaselineFromIngestData(
 		dbRundown._id
 	)

-	await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces)
+	const expectedPackages = generateExpectedPackagesForBaseline(
+		dbRundown._id,
+		adlibPieces,
+		adlibActions,
+		globalPieces,
+		rundownRes.baseline.expectedPackages ?? []
+	)
+
+	await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces, expectedPackages)

-	await updateExpectedPackagesForRundownBaseline(context, ingestModel, rundownRes.baseline)
+	await updateExpectedMediaAndPlayoutItemsForRundownBaseline(context, ingestModel, rundownRes.baseline)

 	return dbRundown
 }
+
+function generateExpectedPackagesForBaseline(
+	rundownId: RundownId,
+	adLibPieces: AdLibPiece[],
+	adLibActions: RundownBaselineAdLibAction[],
+	globalPieces: Piece[],
+	expectedPackages: ExpectedPackage.Any[]
+): IngestExpectedPackage[] {
+	const collector = new ExpectedPackageCollector(rundownId)
+
+	// This expects to generate multiple documents with the same packageId, these get deduplicated during saving.
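+	// (For example, a baseline adlib piece and a baseline adlib action can both expect the same media file:
+	// each produces an entry with the same packageId here, and the save step merges them into a single
+	// ExpectedPackageDB document carrying both ingestSources.)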
+	// This should only concern itself with avoiding duplicates with the same source
+
+	collector.addPackagesWithSource(expectedPackages, {
+		fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS,
+	})
+
+	// Populate the ingestSources
+	for (const piece of adLibPieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE,
+				pieceId: piece._id,
+			})
+	}
+	for (const piece of adLibActions) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION,
+				pieceId: piece._id,
+			})
+	}
+	for (const piece of globalPieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.BASELINE_PIECE,
+				pieceId: piece._id,
+			})
+	}
+
+	return collector.finish()
+}
diff --git a/packages/job-worker/src/ingest/generationSegment.ts b/packages/job-worker/src/ingest/generationSegment.ts
index 5b679eebe8..e3583e8515 100644
--- a/packages/job-worker/src/ingest/generationSegment.ts
+++ b/packages/job-worker/src/ingest/generationSegment.ts
@@ -19,7 +19,7 @@ import {
 	SofieIngestSegment,
 } from '@sofie-automation/blueprints-integration'
 import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage'
-import { updateExpectedPackagesForPartModel } from './expectedPackages.js'
+import { updateExpectedMediaAndPlayoutItemsForPartModel } from './expectedPackages.js'
 import { IngestReplacePartType, IngestSegmentModel } from './model/IngestSegmentModel.js'
 import { ReadonlyDeep } from 'type-fest'
 import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
@@ -107,7 +107,7 @@ async function regenerateSegmentAndUpdateModelFull(
 	const segmentId = ingestModel.getSegmentIdFromExternalId(ingestSegment.externalId)
 	const segmentWatchedPackages = allRundownWatchedPackages.filter(
 		context,
-		(p) => 'segmentId' in p && p.segmentId === segmentId
+		(p) => 'segmentId' in p.source && p.source.segmentId === segmentId
 	)

 	let updatedSegmentModel = await regenerateSegmentAndUpdateModel(
@@ -191,11 +191,10 @@ async function checkIfSegmentReferencesUnloadedPackageInfos(
 	// check if there are any updates right away?
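+	// i.e. any package that listens for packageInfo updates but was not in the previously watched set
+	// must be looked up, as packageInfos for it may already exist in the database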
 	for (const part of segmentModel.parts) {
 		for (const expectedPackage of part.expectedPackages) {
-			if (expectedPackage.listenToPackageInfoUpdates) {
-				const loadedPackage = segmentWatchedPackages.getPackage(expectedPackage._id)
-				if (!loadedPackage) {
+			if (expectedPackage.source.listenToPackageInfoUpdates) {
+				if (!segmentWatchedPackages.hasPackage(expectedPackage.packageId)) {
 					// The package didn't exist prior to the blueprint running
-					expectedPackageIdsToCheck.add(expectedPackage._id)
+					expectedPackageIdsToCheck.add(expectedPackage.packageId)
 				}
 			}
 		}
@@ -411,7 +410,7 @@ function updateModelWithGeneratedPart(
 	)

 	const partModel = segmentModel.replacePart(part, processedPieces, adlibPieces, adlibActions)
-	updateExpectedPackagesForPartModel(context, partModel)
+	updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel)
 }

 /**
diff --git a/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts
new file mode 100644
index 0000000000..631039368f
--- /dev/null
+++ b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts
@@ -0,0 +1,66 @@
+import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
+import {
+	getExpectedPackageId,
+	type ExpectedPackageDBType,
+	type ExpectedPackageIngestSourcePart,
+	type ExpectedPackageIngestSourceRundownBaseline,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import type { BucketId, ExpectedPackageId, RundownId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import type { ReadonlyDeep } from 'type-fest'
+
+/**
+ * A simpler form of ExpectedPackageDB that is scoped to the properties relevant to ingest.
+ * This is limited to be owned by one source, during the save process the documents will be merged
+ */
+export interface IngestExpectedPackage<
+	TPackageSource extends { fromPieceType: ExpectedPackageDBType } =
+		| ExpectedPackageIngestSourcePart
+		| ExpectedPackageIngestSourceRundownBaseline,
+> {
+	packageId: ExpectedPackageId
+
+	package: ReadonlyDeep<ExpectedPackage.Any>
+
+	source: TPackageSource
+}
+
+export class ExpectedPackageCollector {
+	readonly #parentId: RundownId | StudioId | BucketId
+	readonly #packages: IngestExpectedPackage[] = []
+
+	constructor(parentId: RundownId | StudioId | BucketId) {
+		this.#parentId = parentId
+	}
+
+	addPackagesWithSource = ( // never to force the caller to specify the type
+		expectedPackages: ReadonlyDeep[],
+		source: Omit
+	): void => {
+		const insertedPackagesForSource = new Set()
+		for (const expectedPackage of expectedPackages) {
+			const id = getExpectedPackageId(this.#parentId, expectedPackage)
+
+			// Deduplicate with an id including the blueprintPackageId.
+			// This is to ensure the blueprints can reference the package with that id still
+			const uniqueId = `${id}-${expectedPackage._id}-${expectedPackage.listenToPackageInfoUpdates ?? false}`
+
+			// Ensure only inserted once for this source
+			if (insertedPackagesForSource.has(uniqueId)) continue
+			insertedPackagesForSource.add(uniqueId)
+
+			this.#packages.push({
+				packageId: id,
+				package: expectedPackage,
+				source: {
+					...(source as any), // Because this is a generic, this spread doesn't work
+					blueprintPackageId: expectedPackage._id,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				},
+			})
+		}
+	}
+
+	finish(): IngestExpectedPackage[] {
+		return this.#packages
+	}
+}
diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts
index 942095330d..946237b857 100644
--- a/packages/job-worker/src/ingest/model/IngestModel.ts
+++ b/packages/job-worker/src/ingest/model/IngestModel.ts
@@ -1,10 +1,4 @@
-import {
-	ExpectedPackageDBFromBaselineAdLibAction,
-	ExpectedPackageDBFromBaselineAdLibPiece,
-	ExpectedPackageDBFromBaselinePiece,
-	ExpectedPackageDBFromRundownBaselineObjects,
-	ExpectedPackageFromRundown,
-} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import {
 	ExpectedPackageId,
@@ -19,7 +13,7 @@ import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataMo
 import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction'
 import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece'
 import { LazyInitialiseReadonly } from '../../lib/lazy.js'
-import { RundownLock } from '../../jobs/lock.js'
+import type { PlaylistLock, RundownLock } from '../../jobs/lock.js'
 import { IngestSegmentModel, IngestSegmentModelReadonly } from './IngestSegmentModel.js'
 import { IngestPartModel, IngestPartModelReadonly } from './IngestPartModel.js'
 import { ReadonlyDeep } from 'type-fest'
@@ -32,13 +26,7 @@ import { ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs/sh
 import { WrappedShowStyleBlueprint } from '../../blueprints/cache.js'
 import { IBlueprintRundown } from '@sofie-automation/blueprints-integration'
 import type { INotificationsModel } from '../../notifications/NotificationsModel.js'
-
-export type ExpectedPackageForIngestModelBaseline =
-	| ExpectedPackageDBFromBaselineAdLibAction
-	| ExpectedPackageDBFromBaselineAdLibPiece
-	| ExpectedPackageDBFromRundownBaselineObjects
-	| ExpectedPackageDBFromBaselinePiece
-export type ExpectedPackageForIngestModel = ExpectedPackageFromRundown | ExpectedPackageForIngestModelBaseline
+import type { IngestExpectedPackage } from './IngestExpectedPackage.js'

 export interface IngestModelReadonly {
 	/**
@@ -62,7 +50,7 @@ export interface IngestModelReadonly {
 	/**
 	 * The ExpectedPackages for the baseline of this Rundown
 	 */
-	readonly expectedPackagesForRundownBaseline: ReadonlyDeep<ExpectedPackageForIngestModelBaseline>[]
+	readonly expectedPackagesForRundownBaseline: ReadonlyDeep<IngestExpectedPackage>[]

 	/**
 	 * The baseline Timeline objects of this Rundown
@@ -147,7 +135,7 @@ export interface IngestModelReadonly {
 	 * Search for an ExpectedPackage through the whole Rundown
 	 * @param id Id of the ExpectedPackage
 	 */
-	findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageForIngestModel> | undefined
+	findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageIngestSource>[]
 }

 export interface IngestModel extends IngestModelReadonly, BaseModel, INotificationsModel {
@@ -209,12 +197,6 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 	 */
 	setExpectedPlayoutItemsForRundownBaseline(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void

-	/**
-	 * Set the ExpectedPackages for the baseline of this Rundown
-	 * @param expectedPackages The new ExpectedPackages
-	 */
-	setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void
-
 	/**
 	 * Set the data for this Rundown.
 	 * This will either update or create the Rundown
@@ -246,7 +228,8 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 		timelineObjectsBlob: PieceTimelineObjectsBlob,
 		adlibPieces: RundownBaselineAdLibItem[],
 		adlibActions: RundownBaselineAdLibAction[],
-		pieces: Piece[]
+		pieces: Piece[],
+		expectedPackages: IngestExpectedPackage[]
 	): Promise<void>

 	/**
@@ -271,3 +254,10 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati
 }

 export type IngestReplaceSegmentType = Omit
+
+export interface IngestDatabasePersistedModel {
+	/**
+	 * Issue a save of the contents of this model to the database
+	 */
+	saveAllToDatabase(lock: PlaylistLock): Promise<void>
+}
diff --git a/packages/job-worker/src/ingest/model/IngestPartModel.ts b/packages/job-worker/src/ingest/model/IngestPartModel.ts
index e047d08a94..65214e73b0 100644
--- a/packages/job-worker/src/ingest/model/IngestPartModel.ts
+++ b/packages/job-worker/src/ingest/model/IngestPartModel.ts
@@ -2,9 +2,9 @@ import { ReadonlyDeep } from 'type-fest'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
-import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
+import { IngestExpectedPackage } from './IngestExpectedPackage.js'

 export interface IngestPartModelReadonly {
 	/**
@@ -32,7 +32,7 @@ export interface IngestPartModelReadonly {
 	/**
 	 * The ExpectedPackages belonging to this Part
 	 */
-	readonly expectedPackages: ReadonlyDeep<ExpectedPackageFromRundown>[]
+	readonly expectedPackages: ReadonlyDeep<IngestExpectedPackage>[]
 }
 /**
 * Wrap a Part and its contents in a view for Ingest operations
@@ -49,10 +49,4 @@ export interface IngestPartModel extends IngestPartModelReadonly {
 	 * @param expectedPlayoutItems The new ExpectedPlayoutItems
 	 */
 	setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void
-
-	/**
-	 * Set the ExpectedPackages for the contents of this Part
-	 * @param expectedPackages The new ExpectedPackages
-	 */
-	setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void
 }
diff --git a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
index c3396f04be..a45e3e19c4 100644
--- a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
+++ b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts
@@ -98,6 +98,10 @@ export class DocumentChangeTracker<TDoc extends { _id: ProtectedString<any> }> {
 		return Array.from(this.#deletedIds.values())
 	}

+	getDocumentsToSave(): ReadonlyMap {
+		return this.#documentsToSave
+	}
+
 	/**
 	 * Generate the mongodb BulkWrite operations for the documents known to this tracker
 	 * @returns mongodb BulkWrite operations
diff --git a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts
index 9d79e27b74..b7508f80b3 100644
--- a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts
+++ b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts
@@ -1,71 +1,49 @@
-import { ExpectedPackageDBBase } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
-import {
-	ExpectedPackageId,
-	ExpectedPlayoutItemId,
-	PartId,
-	RundownId,
-	SegmentId,
-} from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { ExpectedPlayoutItemId, PartId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { ReadonlyDeep } from 'type-fest'
 import { diffAndReturnLatestObjects, DocumentChanges, getDocumentChanges, setValuesAndTrackChanges } from './utils.js'
+import type { IngestExpectedPackage } from '../IngestExpectedPackage.js'
+import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

-function mutateExpectedPackage(
-	oldObj: ExpectedPackageType,
-	newObj: ExpectedPackageType
-): ExpectedPackageType {
-	return {
-		...newObj,
-		// Retain the created property
-		created: oldObj.created,
-	}
-}
-
-export class ExpectedPackagesStore {
+export class ExpectedPackagesStore<TPackageSource extends { fromPieceType: ExpectedPackageDBType }> {
 	#expectedPlayoutItems: ExpectedPlayoutItemRundown[]
-	#expectedPackages: ExpectedPackageType[]
+	#expectedPackages: IngestExpectedPackage[]

 	#expectedPlayoutItemsWithChanges = new Set<ExpectedPlayoutItemId>()
-	#expectedPackagesWithChanges = new Set<ExpectedPackageId>()
+	#expectedPackagesHasChanges = false

 	get expectedPlayoutItems(): ReadonlyDeep {
 		return this.#expectedPlayoutItems
 	}
-	get expectedPackages(): ReadonlyDeep {
-		// Typescript is not happy with turning ExpectedPackageType into ReadonlyDeep because it can be a union
-		return this.#expectedPackages as any
+	get expectedPackages(): ReadonlyDeep<IngestExpectedPackage<TPackageSource>[]> {
+		// Typescript is not happy because of the generic
+		return this.#expectedPackages as ReadonlyDeep<IngestExpectedPackage<TPackageSource>>[]
 	}

 	get hasChanges(): boolean {
-		return this.#expectedPlayoutItemsWithChanges.size > 0 || this.#expectedPackagesWithChanges.size > 0
+		return this.#expectedPlayoutItemsWithChanges.size > 0 || this.#expectedPackagesHasChanges
 	}

 	get expectedPlayoutItemsChanges(): DocumentChanges {
 		return getDocumentChanges(this.#expectedPlayoutItemsWithChanges, this.#expectedPlayoutItems)
 	}
-	get expectedPackagesChanges(): DocumentChanges {
-		return getDocumentChanges(this.#expectedPackagesWithChanges, this.#expectedPackages)
-	}

 	clearChangedFlags(): void {
 		this.#expectedPlayoutItemsWithChanges.clear()
-		this.#expectedPackagesWithChanges.clear()
+		this.#expectedPackagesHasChanges = false
 	}

 	#rundownId: RundownId
-	#segmentId: SegmentId | undefined
 	#partId: PartId | undefined

 	constructor(
 		isBeingCreated: boolean,
 		rundownId: RundownId,
-		segmentId: SegmentId | undefined,
 		partId: PartId | undefined,
 		expectedPlayoutItems: ExpectedPlayoutItemRundown[],
-		expectedPackages: ExpectedPackageType[]
+		expectedPackages: IngestExpectedPackage[]
 	) {
 		this.#rundownId = rundownId
-		this.#segmentId = segmentId
 		this.#partId = partId

 		this.#expectedPlayoutItems = expectedPlayoutItems
@@ -76,42 +54,38 @@ export class ExpectedPackagesStore
-	setOwnerIds(rundownId: RundownId, segmentId: SegmentId | undefined, partId: PartId | undefined): void {
+	setOwnerIds(
+		rundownId: RundownId,
+		partId: PartId | undefined,
+		updatePackageSource: (source: TPackageSource) => boolean
+	): void {
 		this.#rundownId = rundownId
-		this.#segmentId = segmentId
 		this.#partId = partId

 		setValuesAndTrackChanges(this.#expectedPlayoutItemsWithChanges, this.#expectedPlayoutItems, {
 			rundownId,
 			partId,
 		})
-		setValuesAndTrackChanges(this.#expectedPackagesWithChanges, this.#expectedPackages, {
-			rundownId,
-			// @ts-expect-error Not all ExpectedPackage types have this property
-			segmentId,
-			partId,
-		})
+		for (const expectedPackage of this.#expectedPackages) {
+			const mutatorChanged = updatePackageSource(expectedPackage.source)
+
+			// The doc changed, track it as such
+			if (mutatorChanged) this.#expectedPackagesHasChanges = true
+		}
 	}

-	compareToPreviousData(oldStore: ExpectedPackagesStore): void {
+	compareToPreviousData(oldStore: ExpectedPackagesStore): void {
 		// Diff the objects, but don't update the stored copies
 		diffAndReturnLatestObjects(
 			this.#expectedPlayoutItemsWithChanges,
 			oldStore.#expectedPlayoutItems,
 			this.#expectedPlayoutItems
 		)
-		diffAndReturnLatestObjects(
-			this.#expectedPackagesWithChanges,
-			oldStore.#expectedPackages,
-			this.#expectedPackages,
-			mutateExpectedPackage
-		)
+		this.#expectedPackagesHasChanges = true
 	}

 	setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void {
@@ -127,19 +101,8 @@ export class ExpectedPackagesStore
 	}

-	setExpectedPackages(expectedPackages: ExpectedPackageType[]): void {
-		const newExpectedPackages = expectedPackages.map((pkg) => ({
-			...pkg,
-			partId: this.#partId,
-			segmentId: this.#segmentId,
-			rundownId: this.#rundownId,
-		}))
-
-		this.#expectedPackages = diffAndReturnLatestObjects(
-			this.#expectedPackagesWithChanges,
-			this.#expectedPackages,
-			newExpectedPackages,
-			mutateExpectedPackage
-		)
+	setExpectedPackages(expectedPackages: IngestExpectedPackage[]): void {
+		this.#expectedPackagesHasChanges = true
+		this.#expectedPackages = [...expectedPackages]
 	}
 }
diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts
index 41ad98f6a3..47b916a932 100644
--- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts
+++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts
@@ -3,7 +3,9 @@ import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import {
 	ExpectedPackageDB,
 	ExpectedPackageDBType,
-	ExpectedPackageFromRundown,
+	ExpectedPackageIngestSource,
+	ExpectedPackageIngestSourcePart,
+	ExpectedPackageIngestSourceRundownBaseline,
 } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import {
@@ -27,11 +29,12 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
 import { JobContext, ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../../jobs/index.js'
 import { LazyInitialise, LazyInitialiseReadonly } from '../../../lib/lazy.js'
 import { getRundownId, getSegmentId } from '../../lib.js'
-import { RundownLock } from '../../../jobs/lock.js'
+import { PlaylistLock, RundownLock } from '../../../jobs/lock.js'
 import { IngestSegmentModel } from '../IngestSegmentModel.js'
 import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js'
 import { IngestPartModel } from '../IngestPartModel.js'
 import {
+	assertNever,
 	clone,
 	Complete,
 	deleteAllUndefinedProperties,
@@ -40,15 +43,9 @@ import {
 	literal,
 } from '@sofie-automation/corelib/dist/lib'
 import { IngestPartModelImpl } from './IngestPartModelImpl.js'
-import { DatabasePersistedModel } from '../../../modelBase.js'
 import { ExpectedPackagesStore } from './ExpectedPackagesStore.js'
 import { ReadonlyDeep } from 'type-fest'
-import {
-	ExpectedPackageForIngestModel,
-	ExpectedPackageForIngestModelBaseline,
-	IngestModel,
-	IngestReplaceSegmentType,
-} from '../IngestModel.js'
+import { IngestDatabasePersistedModel, IngestModel, IngestReplaceSegmentType } from '../IngestModel.js'
 import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes'
 import { diffAndReturnLatestObjects } from './utils.js'
 import _ from 'underscore'
@@ -61,6 +58,7 @@ import { generateWriteOpsForLazyDocuments } from './DocumentChangeTracker.js'
 import { IS_PRODUCTION } from '../../../environment.js'
 import { logger } from '../../../logging.js'
 import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js'
+import { IngestExpectedPackage } from '../IngestExpectedPackage.js'

 export interface IngestModelImplExistingData {
 	rundown: DBRundown
@@ -84,7 +82,7 @@ interface SegmentWrapper {
 /**
 * Cache of relevant documents for an Ingest Operation
 */
-export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
+export class IngestModelImpl implements IngestModel, IngestDatabasePersistedModel {
 	public readonly isIngest = true

 	public readonly rundownLock: RundownLock
@@ -117,7 +115,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 	readonly #piecesWithChanges = new Set()
 	#piecesImpl: ReadonlyArray

-	readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore<ExpectedPackageForIngestModelBaseline>
+	readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore<ExpectedPackageIngestSourceRundownBaseline>

 	get rundownBaselineTimelineObjects(): LazyInitialiseReadonly {
 		// Return a simplified view of what we store, of just `timelineObjectsString`
@@ -146,7 +144,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 	get expectedPlayoutItemsForRundownBaseline(): ReadonlyDeep[] {
 		return [...this.#rundownBaselineExpectedPackagesStore.expectedPlayoutItems]
 	}
-	get expectedPackagesForRundownBaseline(): ReadonlyDeep<ExpectedPackageForIngestModelBaseline>[] {
+	get expectedPackagesForRundownBaseline(): ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSourceRundownBaseline>>[] {
 		return [...this.#rundownBaselineExpectedPackagesStore.expectedPackages]
 	}

@@ -172,25 +170,14 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {

 		const groupedExpectedPlayoutItems = groupByToMap(existingData.expectedPlayoutItems, 'partId')

-		const rundownExpectedPackages = existingData.expectedPackages.filter(
-			(pkg): pkg is ExpectedPackageFromRundown =>
-				pkg.fromPieceType === ExpectedPackageDBType.PIECE ||
-				pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE ||
-				pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION
-		)
-		const groupedExpectedPackages = groupByToMap(rundownExpectedPackages, 'partId')
-		const baselineExpectedPackages = existingData.expectedPackages.filter(
-			(pkg): pkg is ExpectedPackageForIngestModelBaseline =>
-				pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
-				pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE ||
-				pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
+		const { baselineExpectedPackages, groupedExpectedPackagesByPart } = groupExpectedPackages(
+			existingData.expectedPackages
 		)

 		this.#rundownBaselineExpectedPackagesStore = new ExpectedPackagesStore(
 			false,
 			this.rundownId,
 			undefined,
-			undefined,
 			groupedExpectedPlayoutItems.get(undefined) ?? [],
 			baselineExpectedPackages
 		)
@@ -209,7 +196,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 					groupedAdLibPieces.get(part._id) ?? [],
 					groupedAdLibActions.get(part._id) ?? [],
 					groupedExpectedPlayoutItems.get(part._id) ?? [],
-					groupedExpectedPackages.get(part._id) ?? []
+					groupedExpectedPackagesByPart.get(part._id) ?? []
 				)
 			)
 			this.segmentsImpl.set(segment._id, {
@@ -242,7 +229,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 			true,
 			this.rundownId,
 			undefined,
-			undefined,
 			[],
 			[]
 		)
@@ -352,18 +338,20 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 		return undefined
 	}

-	findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageForIngestModel> | undefined {
-		const baselinePackage = this.#rundownBaselineExpectedPackagesStore.expectedPackages.find(
-			(pkg) => pkg._id === packageId
-		)
-		if (baselinePackage) return baselinePackage
+	findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep<ExpectedPackageIngestSource>[] {
+		const sources: ReadonlyDeep<ExpectedPackageIngestSource>[] = []
+
+		for (const baselinePackage of this.#rundownBaselineExpectedPackagesStore.expectedPackages) {
+			if (baselinePackage.packageId === packageId) sources.push(baselinePackage.source)
+		}

 		for (const part of this.getAllOrderedParts()) {
-			const partPackage = part.expectedPackages.find((pkg) => pkg._id === packageId)
-			if (partPackage) return partPackage
+			for (const partPackage of part.expectedPackages) {
+				if (partPackage.packageId === packageId) sources.push(partPackage.source)
+			}
 		}

-		return undefined
+		return sources
 	}

 	removeSegment(id: SegmentId): void {
@@ -413,10 +401,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 	setExpectedPlayoutItemsForRundownBaseline(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void {
 		this.#rundownBaselineExpectedPackagesStore.setExpectedPlayoutItems(expectedPlayoutItems)
 	}
-	setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void {
-		// Future: should these be here, or held as part of each adlib?
-		this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages)
-	}

 	setRundownData(
 		rundownData: IBlueprintRundown,
@@ -473,7 +457,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 		timelineObjectsBlob: PieceTimelineObjectsBlob,
 		adlibPieces: RundownBaselineAdLibItem[],
 		adlibActions: RundownBaselineAdLibAction[],
-		pieces: Piece[]
+		pieces: Piece[],
+		expectedPackages: IngestExpectedPackage[]
 	): Promise<void> {
 		const [loadedRundownBaselineObjs, loadedRundownBaselineAdLibPieces, loadedRundownBaselineAdLibActions] =
 			await Promise.all([
@@ -536,6 +521,9 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 			})
 		)
 		this.#piecesImpl = diffAndReturnLatestObjects(this.#piecesWithChanges, this.#piecesImpl, newPieces)
+
+		// Future: should these be here, or held as part of each adlib?
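+		// Replacing the store contents flags the baseline packages as changed;
+		// deduplication and diffing against the db happen later, during the save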
+		this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages)
 	}

 	setRundownOrphaned(orphaned: RundownOrphanedReason | undefined): void {
@@ -673,7 +661,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 		this.#disposed = true
 	}

-	async saveAllToDatabase(): Promise<void> {
+	async saveAllToDatabase(playlistLock: PlaylistLock): Promise<void> {
 		if (this.#disposed) {
 			throw new Error('Cannot save disposed IngestModel')
 		}
@@ -682,6 +670,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 			throw new Error('Cannot save changes with released RundownLock')
 		}

+		if (this.#rundownImpl && playlistLock.playlistId !== this.#rundownImpl.playlistId) {
+			throw new Error('Cannot save changes with incorrect PlaylistLock')
+		}
+
 		const span = this.context.startSpan('IngestModelImpl.saveAllToDatabase')

 		// Ensure there are no duplicate part ids
@@ -691,7 +683,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 			partIds.add(part.part._id)
 		}

-		const saveHelper = new SaveIngestModelHelper()
+		const saveHelper = new SaveIngestModelHelper(this.rundownId)
 		for (const [segmentId, segment] of this.segmentsImpl.entries()) {
 			saveHelper.addSegment(segment.segmentModel, segment.deleted)
 			if (segment.deleted) {
@@ -734,3 +726,50 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel {
 		span?.end()
 	}
 }
+
+function groupExpectedPackages(expectedPackages: ExpectedPackageDB[]) {
+	const baselineExpectedPackages: IngestExpectedPackage<ExpectedPackageIngestSourceRundownBaseline>[] = []
+	const groupedExpectedPackagesByPart = new Map<PartId, IngestExpectedPackage<ExpectedPackageIngestSourcePart>[]>()
+
+	for (const expectedPackage of expectedPackages) {
+		for (const source of expectedPackage.ingestSources) {
+			switch (source.fromPieceType) {
+				case ExpectedPackageDBType.BASELINE_PIECE:
+				case ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
+				case ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+				case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS:
+					baselineExpectedPackages.push({
+						packageId: expectedPackage._id,
+						package: expectedPackage.package,
+						source: source,
+					})
+					break
+				case ExpectedPackageDBType.PIECE:
+				case ExpectedPackageDBType.ADLIB_PIECE:
+				case ExpectedPackageDBType.ADLIB_ACTION: {
+					const partPackages = groupedExpectedPackagesByPart.get(source.partId) ?? []
+					partPackages.push({
+						packageId: expectedPackage._id,
+						package: expectedPackage.package,
+						source: source,
+					})
+					groupedExpectedPackagesByPart.set(source.partId, partPackages)
+					break
+				}
+				case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
+				case ExpectedPackageDBType.BUCKET_ADLIB:
+				case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+					// Ignore
+					break
+				default:
+					assertNever(source)
+					break
+			}
+		}
+	}
+
+	return {
+		baselineExpectedPackages,
+		groupedExpectedPackagesByPart,
+	}
+}
diff --git a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts
index b5b3c1e7c9..0e6c4e4b34 100644
--- a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts
+++ b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts
@@ -6,7 +6,6 @@ import { AdLibActionId, PieceId, RundownId, SegmentId } from '@sofie-automation/
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
-import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { ExpectedPackagesStore } from './ExpectedPackagesStore.js'
 import {
@@ -16,13 +15,15 @@ import {
 	getDocumentChanges,
 	setValuesAndTrackChanges,
 } from './utils.js'
+import type { IngestExpectedPackage } from '../IngestExpectedPackage.js'
+import { ExpectedPackageIngestSourcePart } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'

 export class IngestPartModelImpl implements IngestPartModel {
 	readonly partImpl: DBPart
 	readonly #pieces: Piece[]
 	readonly #adLibPieces: AdLibPiece[]
 	readonly #adLibActions: AdLibAction[]
-	readonly expectedPackagesStore: ExpectedPackagesStore<ExpectedPackageFromRundown>
+	readonly expectedPackagesStore: ExpectedPackagesStore<ExpectedPackageIngestSourcePart>

 	#setPartValue(key: T, newValue: DBPart[T]): void {
 		if (newValue === undefined) {
@@ -86,7 +87,7 @@ export class IngestPartModelImpl implements IngestPartModel {
 	get expectedPlayoutItems(): ReadonlyDeep[] {
 		return [...this.expectedPackagesStore.expectedPlayoutItems]
 	}
-	get expectedPackages(): ReadonlyDeep<ExpectedPackageFromRundown>[] {
+	get expectedPackages(): ReadonlyDeep<IngestExpectedPackage<ExpectedPackageIngestSourcePart>>[] {
 		return [...this.expectedPackagesStore.expectedPackages]
 	}

@@ -135,7 +136,7 @@ export class IngestPartModelImpl implements IngestPartModel {
 		adLibPieces: AdLibPiece[],
 		adLibActions: AdLibAction[],
 		expectedPlayoutItems: ExpectedPlayoutItemRundown[],
-		expectedPackages: ExpectedPackageFromRundown[]
+		expectedPackages: IngestExpectedPackage<ExpectedPackageIngestSourcePart>[]
 	) {
 		this.partImpl = part
 		this.#pieces = pieces
@@ -159,7 +160,6 @@ export class IngestPartModelImpl implements IngestPartModel {
 		this.expectedPackagesStore = new ExpectedPackagesStore(
 			isBeingCreated,
 			part.rundownId,
-			part.segmentId,
 			part._id,
 			expectedPlayoutItems,
 			expectedPackages
@@ -172,7 +172,7 @@ export class IngestPartModelImpl implements IngestPartModel {
 	/**
 	 * This IngestPartModel replaces an existing one.
-	 * Run some comparisons to ensure that
+	 * Run some comparisons to ensure that the changed flags are set correctly
 	 * @param previousModel
 	 */
 	compareToPreviousModel(previousModel: IngestPartModelImpl): void {
@@ -205,7 +205,14 @@ export class IngestPartModelImpl implements IngestPartModel {
 		this.#compareAndSetPartValue('segmentId', segmentId)
 		this.#compareAndSetPartValue('rundownId', rundownId)

-		this.expectedPackagesStore.setOwnerIds(rundownId, segmentId, this.part._id)
+		this.expectedPackagesStore.setOwnerIds(rundownId, this.part._id, (pkgSource) => {
+			if (pkgSource.partId !== this.part._id || pkgSource.segmentId !== segmentId) {
+				pkgSource.partId = this.part._id
+				pkgSource.segmentId = segmentId
+				return true
+			}
+			return false
+		})

 		setValuesAndTrackChanges(this.#piecesWithChanges, this.#pieces, {
 			startRundownId: rundownId,
@@ -225,8 +232,4 @@ export class IngestPartModelImpl implements IngestPartModel {
 	setExpectedPlayoutItems(expectedPlayoutItems: ExpectedPlayoutItemRundown[]): void {
 		this.expectedPackagesStore.setExpectedPlayoutItems(expectedPlayoutItems)
 	}
-	setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void {
-		// Future: should these be here, or held as part of each adlib/piece?
-		this.expectedPackagesStore.setExpectedPackages(expectedPackages)
-	}
 }
diff --git a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts
index 5a965abf46..bcd29965d1 100644
--- a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts
+++ b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts
@@ -1,4 +1,4 @@
-import { PartId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { ReadonlyDeep } from 'type-fest'
 import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment'
 import { IngestReplacePartType, IngestSegmentModel } from '../IngestSegmentModel.js'
@@ -12,6 +13,13 @@ import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { calculatePartExpectedDurationWithTransition } from '@sofie-automation/corelib/dist/playout/timings'
 import { clone } from '@sofie-automation/corelib/dist/lib'
 import { getPartId } from '../../lib.js'
+import {
+	ExpectedPackageDBType,
+	ExpectedPackageIngestSourceAdlibAction,
+	ExpectedPackageIngestSourcePart,
+	ExpectedPackageIngestSourcePiece,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { ExpectedPackageCollector, type IngestExpectedPackage } from '../IngestExpectedPackage.js'

 /**
 * A light wrapper around the IngestPartModel, so that we can track the deletions while still accessing the contents
@@ -207,7 +214,7 @@ export class IngestSegmentModelImpl implements IngestSegmentModel {
 	replacePart(
 		rawPart: IngestReplacePartType,
 		pieces: Piece[],
-		adLibPiece: AdLibPiece[],
+		adLibPieces: AdLibPiece[],
 		adLibActions: AdLibAction[]
 	): IngestPartModel {
 		const part: DBPart = {
@@ -224,14 +231,23 @@ export class IngestSegmentModelImpl implements IngestSegmentModel {

 		const oldPart = this.partsImpl.get(part._id)

+		const expectedPackages = generateExpectedPackagesForPart(
+			part.rundownId,
+			part.segmentId,
+			part._id,
+			pieces,
+			adLibPieces,
+			adLibActions
+		)
+
 		const partModel = new IngestPartModelImpl(
 			!oldPart,
 			clone(part),
 			clone(pieces),
-			clone(adLibPiece),
+			clone(adLibPieces),
 			clone(adLibActions),
 			[],
-			[]
+			expectedPackages
 		)

 		partModel.setOwnerIds(this.segment.rundownId, this.segment._id)
@@ -242,3 +258,48 @@ export class IngestSegmentModelImpl implements IngestSegmentModel {
 		return partModel
 	}
 }
+
+function generateExpectedPackagesForPart(
+	rundownId: RundownId,
+	segmentId: SegmentId,
+	partId: PartId,
+	pieces: Piece[],
+	adLibPieces: AdLibPiece[],
+	adLibActions: AdLibAction[]
+): IngestExpectedPackage[] {
+	const collector = new ExpectedPackageCollector(rundownId)
+
+	// This expects to generate multiple documents with the same packageId, these get deduplicated during saving.
+	// This should only concern itself with avoiding duplicates with the same source
+
+	// Populate the ingestSources
+	for (const piece of pieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.PIECE,
+				pieceId: piece._id,
+				partId: partId,
+				segmentId: segmentId,
+			})
+	}
+	for (const piece of adLibPieces) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.ADLIB_PIECE,
+				pieceId: piece._id,
+				partId: partId,
+				segmentId: segmentId,
+			})
+	}
+	for (const piece of adLibActions) {
+		if (piece.expectedPackages)
+			collector.addPackagesWithSource(piece.expectedPackages, {
+				fromPieceType: ExpectedPackageDBType.ADLIB_ACTION,
+				pieceId: piece._id,
+				partId: partId,
+				segmentId: segmentId,
+			})
+	}
+
+	return collector.finish()
+}
diff --git a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts
index 628a1ce24c..7f25e02afc 100644
--- a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts
+++ b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts
@@ -3,8 +3,7 @@ import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
 import { JobContext } from '../../../jobs/index.js'
 import { ReadonlyDeep } from 'type-fest'
 import { RundownLock } from '../../../jobs/lock.js'
-import { IngestModel } from '../IngestModel.js'
-import { DatabasePersistedModel } from '../../../modelBase.js'
+import { IngestDatabasePersistedModel, IngestModel } from '../IngestModel.js'
 import { getRundownId } from '../../lib.js'
 import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
@@ -23,7 +22,7 @@ export async function loadIngestModelFromRundown(
 	context: JobContext,
 	rundownLock: RundownLock,
 	rundown: ReadonlyDeep
-): Promise<IngestModel & DatabasePersistedModel> {
+): Promise<IngestModel & IngestDatabasePersistedModel> {
 	const span = context.startSpan('IngestModel.loadFromRundown')
 	if (span) span.setLabel('rundownId', unprotectString(rundown._id))
@@ -57,7 +56,7 @@ export async function loadIngestModelFromRundownExternalId(
 	context: JobContext,
 	rundownLock: RundownLock,
 	rundownExternalId: string
-): Promise<IngestModel & DatabasePersistedModel> {
+): Promise<IngestModel & IngestDatabasePersistedModel> {
 	const span = context.startSpan('IngestModel.loadFromExternalId')
 	if (span) span.setLabel('externalId', rundownExternalId)
diff --git a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts
index 296b4b2b30..5bd2806aeb 100644
--- a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts
+++ b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts
@@ -1,8 +1,12 @@
 import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction'
 import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece'
-import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import {
+	ExpectedPackageDB,
+	ExpectedPackageDBType,
+	isPackageReferencedByPlayout,
+} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
-import { PieceId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { PieceId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece'
 import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment'
@@ -12,9 +16,14 @@ import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js'
 import { DocumentChangeTracker } from './DocumentChangeTracker.js'
 import { logger } from '../../../logging.js'
 import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString'
+import { IngestExpectedPackage } from '../IngestExpectedPackage.js'
+import { AnyBulkWriteOperation } from 'mongodb'
+import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib'

 export class SaveIngestModelHelper {
-	#expectedPackages = new DocumentChangeTracker<ExpectedPackageDB>()
+	readonly #rundownId: RundownId
+
+	#expectedPackages: IngestExpectedPackage[] = []

 	#expectedPlayoutItems = new DocumentChangeTracker()
 	#segments = new DocumentChangeTracker()
@@ -23,11 +32,15 @@ export class SaveIngestModelHelper {
 	#adLibPieces = new DocumentChangeTracker()
 	#adLibActions = new DocumentChangeTracker()

-	addExpectedPackagesStore(
-		store: ExpectedPackagesStore,
+	constructor(rundownId: RundownId) {
+		this.#rundownId = rundownId
+	}
+
+	addExpectedPackagesStore(
+		store: ExpectedPackagesStore,
 		deleteAll?: boolean
 	): void {
-		this.#expectedPackages.addChanges(store.expectedPackagesChanges, deleteAll ?? false)
+		if (!deleteAll) this.#expectedPackages.push(...store.expectedPackages)
 		this.#expectedPlayoutItems.addChanges(store.expectedPlayoutItemsChanges, deleteAll ?? false)
 	}

 	addSegment(segment: IngestSegmentModelImpl, segmentIsDeleted: boolean): void {
@@ -69,7 +82,6 @@ export class SaveIngestModelHelper {
 	commit(context: JobContext): Array<Promise<unknown>> {
 		// Log deleted ids:
 		const deletedIds: { [key: string]: ProtectedString<any>[] } = {
-			expectedPackages: this.#expectedPackages.getDeletedIds(),
 			expectedPlayoutItems: this.#expectedPlayoutItems.getDeletedIds(),
 			segments: this.#segments.getDeletedIds(),
 			parts: this.#parts.getDeletedIds(),
@@ -84,7 +96,7 @@ export class SaveIngestModelHelper {
 		}

 		return [
-			context.directCollections.ExpectedPackages.bulkWrite(this.#expectedPackages.generateWriteOps()),
+			writeExpectedPackagesChangesForRundown(context, this.#rundownId, this.#expectedPackages),
 			context.directCollections.ExpectedPlayoutItems.bulkWrite(this.#expectedPlayoutItems.generateWriteOps()),

 			context.directCollections.Segments.bulkWrite(this.#segments.generateWriteOps()),
@@ -95,3 +107,118 @@ export class SaveIngestModelHelper {
 		]
 	}
 }
+
+export async function writeExpectedPackagesChangesForRundown(
+	context: JobContext,
+	rundownId: RundownId | null,
+	documentsToSave: IngestExpectedPackage[]
+): Promise<void> {
+	const existingDocs = (await context.directCollections.ExpectedPackages.findFetch(
+		{
+			studioId: context.studioId,
+			rundownId: rundownId,
+			bucketId: null,
+		},
+		{
+			projection: {
+				_id: 1,
+				playoutSources: 1, // This feels a bit excessive, but the whole object is needed for `isPackageReferencedByPlayout`
+			},
+		}
+	)) as Pick<ExpectedPackageDB, '_id' | 'playoutSources'>[]
+	const existingDocsMap = normalizeArrayToMap(existingDocs, '_id')
+
+	const packagesToSave = new Map<ExpectedPackageId, Omit<ExpectedPackageDB, 'playoutSources'>>()
+	for (const doc of documentsToSave) {
+		const partialDoc = packagesToSave.get(doc.packageId)
+
+		if (partialDoc) {
+			// Add the source to the existing document
+			partialDoc.ingestSources.push(doc.source)
+
+			// Maybe this should check for duplicates, but the point where these documents are generated should be handling that.
+		} else {
+			// Add a new document
+			// Future: omit 'playoutSources' from this doc
+			packagesToSave.set(doc.packageId, {
+				_id: doc.packageId,
+				studioId: context.studioId,
+				rundownId: rundownId,
+				bucketId: null,
+				created: Date.now(),
+				package: doc.package,
+				ingestSources: [doc.source],
+			})
+		}
+	}
+
+	// Generate any insert and update operations
+	const ops: AnyBulkWriteOperation<ExpectedPackageDB>[] = []
+	for (const doc of packagesToSave.values()) {
+		const existingDoc = existingDocsMap.get(doc._id)
+		if (!existingDoc) {
+			// Insert this new document
+			ops.push({
+				insertOne: {
+					document: {
+						...doc,
+						playoutSources: {
+							pieceInstanceIds: [],
+						},
+					},
+				},
+			})
+		} else {
+			// Document already exists, perform an update to preserve other fields
+			// Future: would it be beneficial to perform some diffing to only update the field if it has changed?
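+			// Note: only `ingestSources` is $set below, so the existing `created` timestamp and any
+			// `playoutSources` on the document are deliberately left untouched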
+ ops.push({ + updateOne: { + filter: { _id: doc._id }, + update: { + // Update every field that we want to define + $set: { + ingestSources: doc.ingestSources, + }, + }, + }, + }) + } + } + + // Look over the existing documents, and see is no longer referenced + const idsToDelete: ExpectedPackageId[] = [] + const idsToClearSources: ExpectedPackageId[] = [] + + for (const doc of existingDocs) { + // Skip if this document is in the list of documents to save + if (packagesToSave.has(doc._id)) continue + + if (isPackageReferencedByPlayout(doc)) { + idsToClearSources.push(doc._id) + } else { + idsToDelete.push(doc._id) + } + } + + if (idsToDelete.length > 0) { + ops.push({ + deleteMany: { + filter: { _id: { $in: idsToDelete as any } }, + }, + }) + } + if (idsToClearSources.length > 0) { + ops.push({ + updateMany: { + filter: { _id: { $in: idsToClearSources as any } }, + update: { + $set: { + ingestSources: [], + }, + }, + }, + }) + } + + if (ops.length > 0) await context.directCollections.ExpectedPackages.bulkWrite(ops) +} diff --git a/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts b/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts new file mode 100644 index 0000000000..211f7b3dba --- /dev/null +++ b/packages/job-worker/src/ingest/model/implementation/__tests__/SaveIngestModel.spec.ts @@ -0,0 +1,364 @@ +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import { + ExpectedPackageDB, + ExpectedPackageDBType, + ExpectedPackageIngestSourcePiece, + getExpectedPackageId, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PartId, PieceId, PieceInstanceId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { setupDefaultJobEnvironment } from '../../../../__mocks__/context.js' +import { IngestExpectedPackage } from '../../IngestExpectedPackage.js' +import { writeExpectedPackagesChangesForRundown } from '../SaveIngestModel.js' + +describe('SaveIngestModel', () => { + describe('writeExpectedPackagesChangesForRundown', () => { + const rundownId = protectString('rundown0') + + function createMockExpectedPackage(id: string): ExpectedPackage.ExpectedPackageMediaFile { + return { + _id: id, + type: ExpectedPackage.PackageType.MEDIA_FILE, + layers: ['layer0'], + content: { filePath: `/media/${id}.mp4` }, + version: {}, + contentVersionHash: `hash_${id}`, + sources: [], + sideEffect: {}, + } + } + + function createIngestExpectedPackage( + pkg: ExpectedPackage.Base, + pieceId: string, + partId = 'part0', + segmentId = 'segment0' + ): IngestExpectedPackage { + return { + packageId: getExpectedPackageId(rundownId, pkg), + package: pkg, + source: { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString(pieceId), + partId: protectString(partId), + segmentId: protectString(segmentId), + blueprintPackageId: pkg._id, + listenToPackageInfoUpdates: false, + }, + } + } + + async function createExistingPackage( + context: ReturnType, + pkg: ExpectedPackage.Base, + options?: { + rundownId?: RundownId + ingestSource?: { + pieceId: string + partId?: string + segmentId?: string + } + playoutInstanceIds?: PieceInstanceId[] + created?: number + } + ): Promise { + const packageId = getExpectedPackageId(options?.rundownId ?? rundownId, pkg) + const doc: ExpectedPackageDB = { + _id: packageId, + studioId: context.studioId, + rundownId: options?.rundownId ?? 
rundownId,
+ bucketId: null,
+ created: options?.created ?? Date.now(),
+ package: pkg,
+ ingestSources: options?.ingestSource
+ ? [
+ {
+ fromPieceType: ExpectedPackageDBType.PIECE,
+ pieceId: protectString(options.ingestSource.pieceId),
+ partId: protectString(options.ingestSource.partId ?? 'part0'),
+ segmentId: protectString(options.ingestSource.segmentId ?? 'segment0'),
+ blueprintPackageId: pkg._id,
+ listenToPackageInfoUpdates: false,
+ },
+ ]
+ : [],
+ playoutSources: {
+ pieceInstanceIds: options?.playoutInstanceIds ?? [],
+ },
+ }
+ await context.directCollections.ExpectedPackages.insertOne(doc)
+ return doc
+ }
+
+ it('no documents to save and no existing packages', async () => {
+ const context = setupDefaultJobEnvironment()
+
+ await writeExpectedPackagesChangesForRundown(context, rundownId, [])
+
+ // Should only findFetch, no bulkWrite needed
+ expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1)
+ expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+ })
+
+ it('inserts new ExpectedPackage when none exist', async () => {
+ const context = setupDefaultJobEnvironment()
+
+ const expectedPkg = createMockExpectedPackage('pkg0')
+ const ingestPackage = createIngestExpectedPackage(expectedPkg, 'piece0')
+
+ await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage])
+
+ // Verify operations: findFetch + bulkWrite + insertOne
+ expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3)
+ expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+ expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+ expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne')
+
+ // Verify the inserted package
+ const insertedDoc = await context.directCollections.ExpectedPackages.findOne(ingestPackage.packageId)
+ expect(insertedDoc).toMatchObject({
+ _id: ingestPackage.packageId,
+ studioId: context.studioId,
+ rundownId: rundownId,
+ bucketId: null,
+ package: expectedPkg,
+ ingestSources: [ingestPackage.source],
+ playoutSources: {
+ pieceInstanceIds: [],
+ },
+ } satisfies Omit<ExpectedPackageDB, 'created'>)
+ expect(insertedDoc?.created).toBeGreaterThan(0)
+ })
+
+ it('updates existing ExpectedPackage ingestSources', async () => {
+ const context = setupDefaultJobEnvironment()
+
+ const expectedPkg = createMockExpectedPackage('pkg0')
+ const packageId = getExpectedPackageId(rundownId, expectedPkg)
+ const originalCreated = Date.now() - 10000
+
+ // Pre-populate with existing package
+ await createExistingPackage(context, expectedPkg, {
+ created: originalCreated,
+ ingestSource: { pieceId: 'oldPiece', partId: 'oldPart', segmentId: 'oldSegment' },
+ playoutInstanceIds: [protectString('existingPieceInstance')],
+ })
+ context.mockCollections.ExpectedPackages.clearOpLog()
+
+ // Create new ingest source
+ const newIngestPackage = createIngestExpectedPackage(expectedPkg, 'newPiece', 'newPart', 'newSegment')
+
+ await writeExpectedPackagesChangesForRundown(context, rundownId, [newIngestPackage])
+
+ // Verify operations: findFetch + bulkWrite + update
+ expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3)
+ expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch')
+ expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite')
+ expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('update')
+
+ // Verify the update
+ const updatedDoc = await
context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc?.ingestSources).toEqual([newIngestPackage.source]) + // Verify created timestamp was preserved + expect(updatedDoc?.created).toBe(originalCreated) + // Verify playoutSources were preserved + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(1) + }) + + it('deletes ExpectedPackage when no longer referenced by ingest or playout', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with existing package (no playout references) + await createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'piece0' }, + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Call with empty documentsToSave + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify operations: findFetch + bulkWrite + remove + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('remove') + + // Verify it was deleted + expect(await context.directCollections.ExpectedPackages.findOne(packageId)).toBeUndefined() + }) + + it('clears ingestSources but preserves package when still referenced by playout', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with existing package that has playout references + await createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'piece0' }, + playoutInstanceIds: [protectString('pieceInstance0')], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Call with empty documentsToSave + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify operations: findFetch + bulkWrite + update + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('update') + + // Verify ingestSources were cleared but document still exists + const updatedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc).toBeDefined() + expect(updatedDoc?.ingestSources).toEqual([]) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(1) + }) + + it('merges multiple ingest sources for the same package', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('sharedPkg') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Create two sources for the same package + const ingestPackage1 = createIngestExpectedPackage(expectedPkg, 'piece1', 'part1', 'segment1') + const ingestPackage2 = createIngestExpectedPackage(expectedPkg, 'piece2', 'part2', 'segment2') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage1, ingestPackage2]) + + // Verify only one insert (sources should be merged) + 
expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // 1 operation + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + + // Verify both sources are present + const insertedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(insertedDoc?.ingestSources).toHaveLength(2) + expect(insertedDoc?.ingestSources).toContainEqual(ingestPackage1.source) + expect(insertedDoc?.ingestSources).toContainEqual(ingestPackage2.source) + }) + + it('handles mix of insert, update, delete, and clear operations', async () => { + const context = setupDefaultJobEnvironment() + + const pkg1 = createMockExpectedPackage('pkg1') // Will be updated + const pkg2 = createMockExpectedPackage('pkg2') // Will be deleted (no playout refs) + const pkg3 = createMockExpectedPackage('pkg3') // Will have sources cleared (has playout refs) + const pkg4 = createMockExpectedPackage('pkg4') // Will be inserted + const packageId1 = getExpectedPackageId(rundownId, pkg1) + const packageId2 = getExpectedPackageId(rundownId, pkg2) + const packageId3 = getExpectedPackageId(rundownId, pkg3) + const packageId4 = getExpectedPackageId(rundownId, pkg4) + + // Setup existing packages + await createExistingPackage(context, pkg1, { + ingestSource: { pieceId: 'oldPiece1', partId: 'oldPart1', segmentId: 'oldSegment1' }, + }) + await createExistingPackage(context, pkg2, { + ingestSource: { pieceId: 'piece2', partId: 'part2', segmentId: 'segment2' }, + }) + await createExistingPackage(context, pkg3, { + ingestSource: { pieceId: 'piece3', partId: 'part3', segmentId: 'segment3' }, + playoutInstanceIds: [protectString('pi3')], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // documentsToSave contains: updated pkg1 and new pkg4 + const ingestPackage1 = createIngestExpectedPackage(pkg1, 'newPiece1', 'newPart1', 'newSegment1') + const ingestPackage4 = createIngestExpectedPackage(pkg4, 'piece4', 'part4', 'segment4') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [ingestPackage1, ingestPackage4]) + + // Verify final state + // pkg1: updated + const doc1 = await context.directCollections.ExpectedPackages.findOne(packageId1) + expect(doc1).toBeDefined() + expect((doc1?.ingestSources[0] as ExpectedPackageIngestSourcePiece).pieceId).toBe( + protectString('newPiece1') + ) + + // pkg2: deleted + const doc2 = await context.directCollections.ExpectedPackages.findOne(packageId2) + expect(doc2).toBeUndefined() + + // pkg3: sources cleared but preserved + const doc3 = await context.directCollections.ExpectedPackages.findOne(packageId3) + expect(doc3).toBeDefined() + expect(doc3?.ingestSources).toEqual([]) + expect(doc3?.playoutSources.pieceInstanceIds).toHaveLength(1) + + // pkg4: inserted + const doc4 = await context.directCollections.ExpectedPackages.findOne(packageId4) + expect(doc4).toBeDefined() + expect(doc4?.package).toEqual(pkg4) + }) + + it('preserves playoutSources when updating ingestSources', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + + // Pre-populate with package that has both ingest and playout sources + await 
createExistingPackage(context, expectedPkg, { + ingestSource: { pieceId: 'oldPiece', partId: 'oldPart', segmentId: 'oldSegment' }, + playoutInstanceIds: [ + protectString('pieceInstance1'), + protectString('pieceInstance2'), + ], + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Update with new ingest source + const newIngestPackage = createIngestExpectedPackage(expectedPkg, 'newPiece', 'newPart', 'newSegment') + + await writeExpectedPackagesChangesForRundown(context, rundownId, [newIngestPackage]) + + // Verify playoutSources were preserved + const updatedDoc = await context.directCollections.ExpectedPackages.findOne(packageId) + expect(updatedDoc?.ingestSources).toEqual([newIngestPackage.source]) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toHaveLength(2) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance1') + ) + expect(updatedDoc?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance2') + ) + }) + + it('only affects packages for the specified rundown', async () => { + const context = setupDefaultJobEnvironment() + const otherRundownId = protectString('otherRundown') + + const pkg = createMockExpectedPackage('pkg0') + const packageIdForRundown = getExpectedPackageId(rundownId, pkg) + const packageIdForOtherRundown = getExpectedPackageId(otherRundownId, pkg) + + // Create packages in both rundowns + await createExistingPackage(context, pkg, { + ingestSource: { pieceId: 'piece0' }, + }) + await createExistingPackage(context, pkg, { + rundownId: otherRundownId, + ingestSource: { pieceId: 'piece0' }, + }) + context.mockCollections.ExpectedPackages.clearOpLog() + + // Delete all packages for rundownId by passing empty array + await writeExpectedPackagesChangesForRundown(context, rundownId, []) + + // Verify package for rundownId was deleted + expect(await context.directCollections.ExpectedPackages.findOne(packageIdForRundown)).toBeUndefined() + + // Verify package for otherRundownId still exists + expect(await context.directCollections.ExpectedPackages.findOne(packageIdForOtherRundown)).toBeDefined() + }) + }) +}) diff --git a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap index 8c1b68d443..45148a92f6 100644 --- a/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap +++ b/packages/job-worker/src/ingest/mosDevice/__tests__/__snapshots__/mosIngest.test.ts.snap @@ -15,6 +15,23 @@ exports[`Test recieved mos ingest payloads mosRoCreate 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -307,6 +324,23 @@ exports[`Test recieved mos ingest payloads mosRoCreate: replace existing 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -591,6 +625,23 @@ exports[`Test recieved mos ingest payloads mosRoFullStory: Valid data 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", 
+ "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -896,6 +947,23 @@ exports[`Test recieved mos ingest payloads mosRoReadyToAir: Update ro 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -1191,6 +1259,23 @@ exports[`Test recieved mos ingest payloads mosRoStatus: Update ro 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -1484,6 +1569,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryDelete: Remove segment 1`] "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -1745,6 +1847,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: Into segment 1`] = "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -2051,6 +2170,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryInsert: New segment 1`] = ` "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -2365,6 +2501,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Move whole segment to "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -2662,6 +2815,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryMove: Within segment 1`] = "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -2959,6 +3129,23 @@ exports[`Test recieved mos ingest payloads mosRoStoryReplace: Same segment 1`] = "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -3255,6 +3442,23 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Swap across segments "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -3544,6 +3748,23 @@ exports[`Test recieved 
mos ingest payloads mosRoStorySwap: Swap across segments2 "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -3865,6 +4086,23 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: With first in same se "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, @@ -4162,6 +4400,23 @@ exports[`Test recieved mos ingest payloads mosRoStorySwap: Within same segment 1 "5meLdE_m5k28xXw1vtX2JX8mSYQ_", ], "studioId": "mockStudio4", + "tTimers": [ + { + "index": 1, + "label": "", + "mode": null, + }, + { + "index": 2, + "label": "", + "mode": null, + }, + { + "index": 3, + "label": "", + "mode": null, + }, + ], "timing": { "type": "none", }, diff --git a/packages/job-worker/src/ingest/packageInfo.ts b/packages/job-worker/src/ingest/packageInfo.ts index 6816a9d4d4..aec2f1f590 100644 --- a/packages/job-worker/src/ingest/packageInfo.ts +++ b/packages/job-worker/src/ingest/packageInfo.ts @@ -1,38 +1,11 @@ import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { - ExpectedPackagesRegenerateProps, - PackageInfosUpdatedRundownProps, -} from '@sofie-automation/corelib/dist/worker/ingest' +import { PackageInfosUpdatedRundownProps } from '@sofie-automation/corelib/dist/worker/ingest' import { logger } from '../logging.js' import { JobContext } from '../jobs/index.js' import { regenerateSegmentsFromIngestData } from './generationSegment.js' -import { runWithRundownLock } from './lock.js' -import { updateExpectedPackagesForPartModel, updateExpectedPackagesForRundownBaseline } from './expectedPackages.js' -import { loadIngestModelFromRundown } from './model/implementation/LoadIngestModel.js' import { runCustomIngestUpdateOperation } from './runOperation.js' - -/** - * Debug: Regenerate ExpectedPackages for a Rundown - */ -export async function handleExpectedPackagesRegenerate( - context: JobContext, - data: ExpectedPackagesRegenerateProps -): Promise { - return runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { - if (!rundown) throw new Error(`Rundown "${data.rundownId}" not found`) - - const ingestModel = await loadIngestModelFromRundown(context, rundownLock, rundown) - - for (const part of ingestModel.getAllOrderedParts()) { - updateExpectedPackagesForPartModel(context, part) - } - - await updateExpectedPackagesForRundownBaseline(context, ingestModel, undefined, true) - - await ingestModel.saveAllToDatabase() - }) -} +import { assertNever } from '@sofie-automation/corelib/dist/lib' /** * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos @@ -58,23 +31,35 @@ export async function handleUpdatedPackageInfoForRundown( let regenerateRundownBaseline = false for (const packageId of data.packageIds) { - const pkg = ingestModel.findExpectedPackage(packageId) - if (pkg) { - if ( - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) { - 
segmentsToUpdate.add(pkg.segmentId)
- } else if (
- pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION ||
- pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE ||
- pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS
- ) {
- regenerateRundownBaseline = true
+ const pkgIngestSources = ingestModel.findExpectedPackageIngestSources(packageId)
+ for (const source of pkgIngestSources) {
+ // Only consider sources that are marked to listen to package info updates
+ if (!source.listenToPackageInfoUpdates) continue
+
+ switch (source.fromPieceType) {
+ case ExpectedPackageDBType.PIECE:
+ case ExpectedPackageDBType.ADLIB_PIECE:
+ case ExpectedPackageDBType.ADLIB_ACTION:
+ segmentsToUpdate.add(source.segmentId)
+ break
+
+ case ExpectedPackageDBType.BASELINE_PIECE:
+ case ExpectedPackageDBType.BASELINE_ADLIB_ACTION:
+ case ExpectedPackageDBType.BASELINE_ADLIB_PIECE:
+ case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS:
+ regenerateRundownBaseline = true
+ break
+ case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS:
+ case ExpectedPackageDBType.BUCKET_ADLIB:
+ case ExpectedPackageDBType.BUCKET_ADLIB_ACTION:
+ // Ignore
+ break
+ default:
+ assertNever(source)
}
- } else {
- logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`)
+ }
+ if (pkgIngestSources.length === 0) {
+ logger.warn(`onUpdatedPackageInfoForRundown: Missing ingestSources for package: "${packageId}"`)
+ }
}
diff --git a/packages/job-worker/src/ingest/runOperation.ts b/packages/job-worker/src/ingest/runOperation.ts
index b6353960dd..3bdb1bbf0a 100644
--- a/packages/job-worker/src/ingest/runOperation.ts
+++ b/packages/job-worker/src/ingest/runOperation.ts
@@ -1,4 +1,4 @@
-import { IngestModel, IngestModelReadonly } from './model/IngestModel.js'
+import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js'
import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit.js'
import { SofieIngestRundownDataCache, SofieIngestRundownDataCacheGenerator } from './sofieIngestCache.js'
import { canRundownBeUpdated, getRundownId, getSegmentId } from './lib.js'
@@ -8,7 +8,6 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/erro
import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel.js'
import { Complete, clone } from '@sofie-automation/corelib/dist/lib'
import { CommitIngestData, runWithRundownLockWithoutFetchingRundown } from './lock.js'
-import { DatabasePersistedModel } from '../modelBase.js'
import {
NrcsIngestChangeDetails,
IngestRundown,
@@ -352,7 +351,7 @@ function sortIngestRundown(rundown: IngestRundown): void {
async function updateSofieRundownModel(
context: JobContext,
- pIngestModel: Promise<IngestModel & DatabasePersistedModel>,
+ pIngestModel: Promise<IngestDatabasePersistedModel>,
computedIngestChanges: ComputedIngestChanges | null
) {
const ingestModel = await pIngestModel
diff --git a/packages/job-worker/src/ipc.ts b/packages/job-worker/src/ipc.ts
index 45fbf89324..4b8b938846 100644
--- a/packages/job-worker/src/ipc.ts
+++ b/packages/job-worker/src/ipc.ts
@@ -10,14 +10,8 @@ import { getPrometheusMetricsString, setupPrometheusMetrics } from '@sofie-autom
*/
class IpcJobManager implements JobManager {
constructor(
- public readonly jobFinished: (
- id: string,
- startedTime: number,
- finishedTime: number,
- error: any,
- result: any
- ) => Promise<void>,
- public readonly queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise<void>,
+ public readonly jobFinished: JobManager['jobFinished'],
+ public readonly queueJob: JobManager['queueJob'],
private readonly interruptJobStream: (queueName: string) => Promise<void>,
private readonly waitForNextJob: (queueName: string) => Promise<void>,
private readonly getNextJob: (queueName: string) => Promise<JobSpec>
@@ -43,11 +37,11 @@ class IpcJobManager implements JobManager {
export class IpcJobWorker extends JobWorkerBase {
constructor(
workerId: WorkerId,
- jobFinished: (id: string, startedTime: number, finishedTime: number, error: any, result: any) => Promise<void>,
+ jobFinished: JobManager['jobFinished'],
interruptJobStream: (queueName: string) => Promise<void>,
waitForNextJob: (queueName: string) => Promise<void>,
getNextJob: (queueName: string) => Promise<JobSpec>,
- queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise<void>,
+ queueJob: JobManager['queueJob'],
logLine: (msg: LogEntry) => Promise<void>,
fastTrackTimeline: FastTrackTimelineFunc,
enableFreezeLimit: boolean
diff --git a/packages/job-worker/src/jobs/index.ts b/packages/job-worker/src/jobs/index.ts
index aebd415538..78bd930ea6 100644
--- a/packages/job-worker/src/jobs/index.ts
+++ b/packages/job-worker/src/jobs/index.ts
@@ -24,6 +24,19 @@ export { ApmSpan }
export { ProcessedShowStyleVariant, ProcessedShowStyleBase, ProcessedShowStyleCompound }
export { JobStudio }

+export interface QueueJobOptions {
+ /**
+ * The job should be run with a low priority, allowing other operations to be run first
+ */
+ lowPriority?: boolean
+
+ /**
+ * Debounce execution, delaying execution for at least this wait time (in ms).
+ * If the job is already queued, it will not be queued again
+ */
+ debounce?: number
+}
+
/**
* Context for any job run in the job-worker
*/
@@ -54,7 +67,11 @@ export interface JobContext extends StudioCacheContext {
* @param data Data for the job
* @returns Promise which resolves once successfully queued
*/
- queueStudioJob<T extends keyof StudioJobFunc>(name: T, data: Parameters<StudioJobFunc[T]>[0]): Promise<void>
+ queueStudioJob<T extends keyof StudioJobFunc>(
+ name: T,
+ data: Parameters<StudioJobFunc[T]>[0],
+ options?: QueueJobOptions
+ ): Promise<void>
/**
* Queue an Event job to be run
* It is not possible to wait for the result.
* This ensures the threads don't get deadlocked
diff --git a/packages/job-worker/src/manager.ts b/packages/job-worker/src/manager.ts
index ef7bc8697d..745ba868dd 100644
--- a/packages/job-worker/src/manager.ts
+++ b/packages/job-worker/src/manager.ts
@@ -1,5 +1,6 @@
-import { WorkerId } from '@sofie-automation/corelib/dist/dataModel/Ids'
-import { JobSpec } from './main.js'
+import type { WorkerId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import type { JobSpec } from './main.js'
+import type { QueueJobOptions } from './jobs/index.js'

export interface JobManager {
jobFinished: (
@@ -10,7 +11,12 @@ export interface JobManager {
result: any
) => Promise<void>
// getNextJob: (queueName: string) => Promise<JobSpec>
- queueJob: (queueName: string, jobName: string, jobData: unknown) => Promise<void>
+ queueJob: (
+ queueName: string,
+ jobName: string,
+ jobData: unknown,
+ options: QueueJobOptions | undefined
+ ) => Promise<void>

subscribeToQueue: (queueName: string, workerId: WorkerId) => JobStream
}
diff --git a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap
index d99635086b..8017111a4f 100644
--- a/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap
+++ b/packages/job-worker/src/playout/__tests__/__snapshots__/playout.test.ts.snap
@@ -77,6 +77,23 @@ exports[`Playout API Basic rundown control 4`] = `
"resetTime": 0,
"rundownIdsInOrder": [],
"studioId": "mockStudio0",
+ "tTimers": [
+ {
+ "index": 1,
+ "label": "",
+ "mode": null,
+ },
+ {
+ "index": 2,
+ "label": "",
+ "mode": null,
+ },
+ {
+ "index": 3,
+ "label": "",
+ "mode": null,
+ },
+ ],
"timing": {
"type": "none",
},
diff --git a/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts
new file mode 100644
index 0000000000..dd2c1888f8
--- /dev/null
+++ b/packages/job-worker/src/playout/__tests__/expectedPackages.test.ts
@@ -0,0 +1,509 @@
+import {
+ ExpectedPackageId,
+ PieceInstanceId,
+ RundownId,
+ RundownPlaylistId,
+} from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { protectString, protectStringArray } from '@sofie-automation/corelib/dist/protectedString'
+import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js'
+import { setupDefaultRundownPlaylist, setupMockShowStyleCompound } from '../../__mocks__/presetCollections.js'
+import { handleCleanupOrphanedExpectedPackageReferences } from '../expectedPackages.js'
+import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
+import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance'
+import { getCurrentTime } from '../../lib/index.js'
+
+describe('handleCleanupOrphanedExpectedPackageReferences', () => {
+ let context: MockJobContext
+ let rundownId: RundownId
+ let playlistId: RundownPlaylistId
+
+ beforeEach(async () => {
+ context = setupDefaultJobEnvironment()
+
+ // Setup showstyle so we can create a rundown
+ await setupMockShowStyleCompound(context)
+ const result = await setupDefaultRundownPlaylist(context)
+ rundownId = result.rundownId
+ playlistId = result.playlistId
+ })
+
+ function createMockExpectedPackage(
+ id: string,
+ pieceInstanceIds: string[],
+ ingestSourceCount: number = 0
+ ): ExpectedPackageDB {
+ const ingestSources = []
+ for (let i = 0; i < ingestSourceCount; i++) {
+ ingestSources.push({
+ fromPieceType: ExpectedPackageDBType.PIECE,
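+ // Synthetic test fixtures: each generated source points at a distinct (hypothetical)
+ // piece/part/segment id, so individual sources can be told apart in assertions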
+ blueprintPackageId: `blueprint_${id}_${i}`,
+ listenToPackageInfoUpdates: false,
+ pieceId: protectString(`piece_${id}_${i}`),
+ partId: protectString(`part_${id}_${i}`),
+ segmentId: protectString(`segment_${id}_${i}`),
+ })
+ }
+
+ return {
+ _id: protectString(id),
+ studioId: context.studioId,
+ rundownId: rundownId,
+ bucketId: null,
+ created: getCurrentTime(),
+ package: {
+ _id: id,
+ contentVersionHash: 'hash1',
+ type: 'media_file' as any,
+ content: {},
+ version: {},
+ sources: [],
+ layers: [],
+ sideEffect: {},
+ },
+ ingestSources: ingestSources as ExpectedPackageDB['ingestSources'],
+ playoutSources: {
+ pieceInstanceIds: protectStringArray(pieceInstanceIds),
+ },
+ }
+ }
+
+ function createMockPieceInstance(
+ id: string,
+ neededPackageIds: string[] = [],
+ reset: boolean = false
+ ): Partial<PieceInstance> {
+ return {
+ _id: protectString(id),
+ rundownId: rundownId,
+ partInstanceId: protectString('partInstance_0'),
+ playlistActivationId: protectString('activation_0'),
+ reset: reset,
+ neededExpectedPackageIds: protectStringArray(neededPackageIds),
+ piece: {
+ _id: protectString(`piece_${id}`),
+ startPartId: protectString('part_0'),
+ externalId: `MOCK_PIECE_${id}`,
+ name: `Piece ${id}`,
+ lifespan: 'WithinPart' as any,
+ invalid: false,
+ enable: { start: 0 },
+ sourceLayerId: 'source0',
+ outputLayerId: 'output0',
+ content: {},
+ timelineObjectsString: '' as any,
+ pieceType: 'Normal' as any,
+ },
+ }
+ }
+
+ test('does nothing when there are no expected packages', async () => {
+ await handleCleanupOrphanedExpectedPackageReferences(context, {
+ playlistId: playlistId,
+ rundownId: rundownId,
+ })
+
+ // Verify the operations performed on ExpectedPackages
+ const ops = context.mockCollections.ExpectedPackages.operations
+ expect(ops[0].type).toBe('findFetch')
+ expect(ops.slice(1)).toEqual([])
+ })
+
+ test('does nothing when all package references are valid', async () => {
+ // Create piece instances that reference expected packages
+ await context.mockCollections.PieceInstances.insertOne(
+ createMockPieceInstance('pieceInstance1', ['package1']) as any
+ )
+ await context.mockCollections.PieceInstances.insertOne(
+ createMockPieceInstance('pieceInstance2', ['package2']) as any
+ )
+
+ // Create expected packages that are referenced by valid piece instances
+ await context.mockCollections.ExpectedPackages.insertOne(
+ createMockExpectedPackage('package1', ['pieceInstance1'])
+ )
+ await context.mockCollections.ExpectedPackages.insertOne(
+ createMockExpectedPackage('package2', ['pieceInstance2'])
+ )
+
+ // Clear operations from setup
+ context.mockCollections.ExpectedPackages.clearOpLog()
+
+ await handleCleanupOrphanedExpectedPackageReferences(context, {
+ playlistId: playlistId,
+ rundownId: rundownId,
+ })
+
+ // Verify the operations performed on ExpectedPackages (only read, no writes)
+ const ops = context.mockCollections.ExpectedPackages.operations
+ expect(ops[0].type).toBe('findFetch')
+ expect(ops.slice(1)).toEqual([])
+
+ // Verify packages remain unchanged
+ const packages = await context.directCollections.ExpectedPackages.findFetch({})
+ expect(packages).toHaveLength(2)
+ expect(packages.find((p) => p._id === protectString('package1'))?.playoutSources.pieceInstanceIds).toEqual([
+ protectString('pieceInstance1'),
+ ])
+ expect(packages.find((p) => p._id === protectString('package2'))?.playoutSources.pieceInstanceIds).toEqual([
+ protectString('pieceInstance2'),
+ ])
+ })
+
+ test('removes orphaned package reference when piece instance no longer exists', async ()
=> { + // Create expected package that references a piece instance that doesn't exist + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['nonExistentPieceInstance'], 1) // has ingest source + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] }, + ]) + }) + + test('deletes package when all references are orphaned and no ingest sources', async () => { + // Create expected package with no ingest sources and orphaned piece instance + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['nonExistentPieceInstance'], 0) // no ingest sources + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'removeOne', args: [{ _id: 'package1' }] }, + ]) + }) + + test('removes only orphaned references when partial removal is needed', async () => { + // Create a valid piece instance + await context.mockCollections.PieceInstances.insertOne( + createMockPieceInstance('validPieceInstance', ['package1']) as any + ) + + // Create expected package that references both valid and invalid piece instances + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['validPieceInstance', 'orphanedPieceInstance']) + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { + type: 'update', + args: [ + { _id: 'package1' }, + { $pull: { 'playoutSources.pieceInstanceIds': { $in: ['orphanedPieceInstance'] } } }, + ], + }, + ]) + + // Verify the result - only valid reference remains + const packages = await context.directCollections.ExpectedPackages.findFetch({}) + expect(packages).toHaveLength(1) + expect(packages[0].playoutSources.pieceInstanceIds).toEqual([protectString('validPieceInstance')]) + }) + + test('removes reference when piece instance exists but does not reference the package', async () => { + // Create piece instance that references a different package + await context.mockCollections.PieceInstances.insertOne( + createMockPieceInstance('pieceInstance1', ['differentPackage']) as any + ) + + // Create expected package that references the piece instance, but piece instance doesn't reference it back + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', 
['pieceInstance1'], 1) // has ingest source + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] }, + ]) + }) + + test('deletes package when reset piece instance references the package', async () => { + // Create a reset piece instance + await context.mockCollections.PieceInstances.insertOne( + createMockPieceInstance('resetPieceInstance', ['package1'], true) as any + ) + + // Create expected package that references the reset piece instance + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['resetPieceInstance'], 0) // no ingest sources + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'removeOne', args: [{ _id: 'package1' }] }, + ]) + }) + + test('handles multiple packages with mixed scenarios', async () => { + // Create valid piece instances + await context.mockCollections.PieceInstances.insertOne( + createMockPieceInstance('pieceInstance1', ['package1', 'package3']) as any + ) + await context.mockCollections.PieceInstances.insertOne( + createMockPieceInstance('pieceInstance2', ['package2']) as any + ) + + // Package1: valid reference, should be kept as-is + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['pieceInstance1']) + ) + + // Package2: valid reference, should be kept as-is + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package2', ['pieceInstance2']) + ) + + // Package3: valid + orphaned reference, should have orphaned removed + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package3', ['pieceInstance1', 'orphanedInstance']) + ) + + // Package4: all orphaned, no ingest sources, should be deleted + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package4', ['orphanedInstance1', 'orphanedInstance2'], 0) + ) + + // Package5: all orphaned, has ingest sources, should be kept with empty pieceInstanceIds + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package5', ['orphanedInstance3'], 1) + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [3] }, + { + type: 'update', + args: [ + { _id: 'package3' }, + { $pull: { 'playoutSources.pieceInstanceIds': { 
$in: ['orphanedInstance'] } } }, + ], + }, + { type: 'removeOne', args: [{ _id: 'package4' }] }, + { type: 'update', args: [{ _id: 'package5' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] }, + ]) + + const packages = await context.directCollections.ExpectedPackages.findFetch({}) + expect(packages).toHaveLength(4) // package4 is deleted + + const package1Id = protectString('package1') + const package2Id = protectString('package2') + const package3Id = protectString('package3') + const package4Id = protectString('package4') + const package5Id = protectString('package5') + + const package1 = packages.find((p) => p._id === package1Id) + const package2 = packages.find((p) => p._id === package2Id) + const package3 = packages.find((p) => p._id === package3Id) + const package4 = packages.find((p) => p._id === package4Id) + const package5 = packages.find((p) => p._id === package5Id) + + // Packages with only valid references should be unchanged + expect(package1?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance1')]) + expect(package2?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance2')]) + // Package3 should have orphaned reference removed, valid one kept + expect(package3?.playoutSources.pieceInstanceIds).toEqual([protectString('pieceInstance1')]) + // Package4 should be deleted + expect(package4).toBeUndefined() + // Package5 should have pieceInstanceIds cleared + expect(package5?.playoutSources.pieceInstanceIds).toEqual([]) + }) + + test('deletes rundown package when orphaned while keeping bucket and other rundown packages', async () => { + const otherRundownId = protectString('otherRundown') + + // Create expected package for a different rundown (should not be affected) + const otherRundownPackage = createMockExpectedPackage('packageOther', ['orphanedPieceInstance']) + otherRundownPackage.rundownId = otherRundownId + await context.mockCollections.ExpectedPackages.insertOne(otherRundownPackage) + + // Create expected package for current rundown with orphaned reference + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('packageCurrent', ['orphanedPieceInstance'], 0) + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'removeOne', args: [{ _id: 'packageCurrent' }] }, + ]) + + const packages = await context.directCollections.ExpectedPackages.findFetch({}) + // packageCurrent is deleted, packageOther remains + expect(packages).toHaveLength(1) + + const packageOther = packages.find((p) => p._id === protectString('packageOther')) + const packageCurrent = packages.find((p) => p._id === protectString('packageCurrent')) + + // packageOther should be untouched (has different rundownId) + expect(packageOther?.playoutSources.pieceInstanceIds).toEqual([protectString('orphanedPieceInstance')]) + // packageCurrent should be deleted + expect(packageCurrent).toBeUndefined() + }) + + test('deletes rundown package when orphaned while keeping bucket packages', async () => { + // Create a bucket package (should not be affected since it has bucketId set) + const bucketPackage = createMockExpectedPackage('bucketPackage', 
['orphanedPieceInstance']) + bucketPackage.rundownId = null + bucketPackage.bucketId = protectString('bucket1') + await context.mockCollections.ExpectedPackages.insertOne(bucketPackage) + + // Create expected package for current rundown with orphaned reference + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('rundownPackage', ['orphanedPieceInstance'], 0) + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'removeOne', args: [{ _id: 'rundownPackage' }] }, + ]) + + const packages = await context.directCollections.ExpectedPackages.findFetch({}) + // rundownPackage is deleted, bucketPackage remains + expect(packages).toHaveLength(1) + + const bucketPkg = packages.find((p) => p._id === protectString('bucketPackage')) + const rundownPkg = packages.find((p) => p._id === protectString('rundownPackage')) + + // bucketPackage should be untouched (has different bucketId, not matched by query) + expect(bucketPkg?.playoutSources.pieceInstanceIds).toEqual([protectString('orphanedPieceInstance')]) + // rundownPackage should be deleted + expect(rundownPkg).toBeUndefined() + }) + + test('handles package with no piece instance references', async () => { + // Create expected package with no piece instance references + await context.mockCollections.ExpectedPackages.insertOne(createMockExpectedPackage('package1', [], 1)) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + // The function updates even if pieceInstanceIds is already empty + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] }, + ]) + }) + + test('handles piece instance with no neededExpectedPackageIds', async () => { + // Create piece instance with no neededExpectedPackageIds + const pieceInstance = createMockPieceInstance('pieceInstance1', []) + delete pieceInstance.neededExpectedPackageIds + await context.mockCollections.PieceInstances.insertOne(pieceInstance as any) + + // Create expected package that references this piece instance + await context.mockCollections.ExpectedPackages.insertOne( + createMockExpectedPackage('package1', ['pieceInstance1'], 1) + ) + + // Clear operations from setup + context.mockCollections.ExpectedPackages.clearOpLog() + + await handleCleanupOrphanedExpectedPackageReferences(context, { + playlistId: playlistId, + rundownId: rundownId, + }) + + // Verify the operations performed on ExpectedPackages + const ops = context.mockCollections.ExpectedPackages.operations + expect(ops[0].type).toBe('findFetch') + expect(ops.slice(1)).toEqual([ + { type: 'bulkWrite', args: [1] }, + { type: 'update', args: [{ _id: 'package1' }, { $set: { 'playoutSources.pieceInstanceIds': [] } }] }, + ]) + }) +}) diff --git 
a/packages/job-worker/src/playout/__tests__/tTimers.test.ts b/packages/job-worker/src/playout/__tests__/tTimers.test.ts new file mode 100644 index 0000000000..144baca1a5 --- /dev/null +++ b/packages/job-worker/src/playout/__tests__/tTimers.test.ts @@ -0,0 +1,601 @@ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +import { useFakeCurrentTime, useRealCurrentTime, adjustFakeTime } from '../../__mocks__/time.js' +import { + validateTTimerIndex, + pauseTTimer, + resumeTTimer, + restartTTimer, + createCountdownTTimer, + createFreeRunTTimer, + calculateTTimerCurrentTime, + calculateNextTimeOfDayTarget, + createTimeOfDayTTimer, +} from '../tTimers.js' +import type { + RundownTTimerMode, + RundownTTimerModeTimeOfDay, +} from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' + +describe('tTimers utils', () => { + beforeEach(() => { + useFakeCurrentTime(10000) // Set a fixed time for tests + }) + + afterEach(() => { + useRealCurrentTime() + }) + + describe('validateTTimerIndex', () => { + it('should accept valid indices 1, 2, 3', () => { + expect(() => validateTTimerIndex(1)).not.toThrow() + expect(() => validateTTimerIndex(2)).not.toThrow() + expect(() => validateTTimerIndex(3)).not.toThrow() + }) + + it('should reject index 0', () => { + expect(() => validateTTimerIndex(0)).toThrow('T-timer index out of range: 0') + }) + + it('should reject index 4', () => { + expect(() => validateTTimerIndex(4)).toThrow('T-timer index out of range: 4') + }) + + it('should reject negative indices', () => { + expect(() => validateTTimerIndex(-1)).toThrow('T-timer index out of range: -1') + }) + + it('should reject NaN', () => { + expect(() => validateTTimerIndex(NaN)).toThrow('T-timer index out of range: NaN') + }) + }) + + describe('pauseTTimer', () => { + it('should pause a running countdown timer', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: null, + duration: 60000, + stopAtZero: true, + } + + const result = pauseTTimer(timer) + + expect(result).toEqual({ + type: 'countdown', + startTime: 5000, + pauseTime: 10000, // getCurrentTime() + duration: 60000, + stopAtZero: true, + }) + }) + + it('should pause a running freeRun timer', () => { + const timer: RundownTTimerMode = { + type: 'freeRun', + startTime: 5000, + pauseTime: null, + } + + const result = pauseTTimer(timer) + + expect(result).toEqual({ + type: 'freeRun', + startTime: 5000, + pauseTime: 10000, + }) + }) + + it('should return unchanged countdown timer if already paused', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: 7000, // already paused + duration: 60000, + stopAtZero: true, + } + + const result = pauseTTimer(timer) + + expect(result).toBe(timer) // same reference, unchanged + }) + + it('should return unchanged freeRun timer if already paused', () => { + const timer: RundownTTimerMode = { + type: 'freeRun', + startTime: 5000, + pauseTime: 7000, // already paused + } + + const result = pauseTTimer(timer) + + expect(result).toBe(timer) // same reference, unchanged + }) + + it('should return null for null timer', () => { + expect(pauseTTimer(null)).toBeNull() + }) + }) + + describe('resumeTTimer', () => { + it('should resume a paused countdown timer', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: 8000, // paused 3 seconds after start + duration: 60000, + stopAtZero: true, + } + + const result = resumeTTimer(timer) + + // pausedOffset = 5000 - 8000 = -3000 + // newStartTime = 
10000 + (-3000) = 7000 + expect(result).toEqual({ + type: 'countdown', + startTime: 7000, // 3 seconds before now + pauseTime: null, + duration: 60000, + stopAtZero: true, + }) + }) + + it('should resume a paused freeRun timer', () => { + const timer: RundownTTimerMode = { + type: 'freeRun', + startTime: 2000, + pauseTime: 6000, // paused 4 seconds after start + } + + const result = resumeTTimer(timer) + + // pausedOffset = 2000 - 6000 = -4000 + // newStartTime = 10000 + (-4000) = 6000 + expect(result).toEqual({ + type: 'freeRun', + startTime: 6000, // 4 seconds before now + pauseTime: null, + }) + }) + + it('should return countdown timer unchanged if already running', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: null, // already running + duration: 60000, + stopAtZero: true, + } + + const result = resumeTTimer(timer) + + expect(result).toBe(timer) // same reference + }) + + it('should return freeRun timer unchanged if already running', () => { + const timer: RundownTTimerMode = { + type: 'freeRun', + startTime: 5000, + pauseTime: null, // already running + } + + const result = resumeTTimer(timer) + + expect(result).toBe(timer) // same reference + }) + + it('should return null for null timer', () => { + expect(resumeTTimer(null)).toBeNull() + }) + }) + + describe('restartTTimer', () => { + it('should restart a running countdown timer', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: null, + duration: 60000, + stopAtZero: true, + } + + const result = restartTTimer(timer) + + expect(result).toEqual({ + type: 'countdown', + startTime: 10000, // now + pauseTime: null, + duration: 60000, + stopAtZero: true, + }) + }) + + it('should restart a paused countdown timer (stays paused)', () => { + const timer: RundownTTimerMode = { + type: 'countdown', + startTime: 5000, + pauseTime: 8000, + duration: 60000, + stopAtZero: false, + } + + const result = restartTTimer(timer) + + expect(result).toEqual({ + type: 'countdown', + startTime: 10000, // now + pauseTime: 10000, // also now (paused at start) + duration: 60000, + stopAtZero: false, + }) + }) + + it('should return null for freeRun timer', () => { + const timer: RundownTTimerMode = { + type: 'freeRun', + startTime: 5000, + pauseTime: null, + } + + expect(restartTTimer(timer)).toBeNull() + }) + + it('should return null for null timer', () => { + expect(restartTTimer(null)).toBeNull() + }) + }) + + describe('createCountdownTTimer', () => { + it('should create a running countdown timer', () => { + const result = createCountdownTTimer(60000, { + stopAtZero: true, + startPaused: false, + }) + + expect(result).toEqual({ + type: 'countdown', + startTime: 10000, + pauseTime: null, + duration: 60000, + stopAtZero: true, + }) + }) + + it('should create a paused countdown timer', () => { + const result = createCountdownTTimer(30000, { + stopAtZero: false, + startPaused: true, + }) + + expect(result).toEqual({ + type: 'countdown', + startTime: 10000, + pauseTime: 10000, + duration: 30000, + stopAtZero: false, + }) + }) + + it('should throw for zero duration', () => { + expect(() => + createCountdownTTimer(0, { + stopAtZero: true, + startPaused: false, + }) + ).toThrow('Duration must be greater than zero') + }) + + it('should throw for negative duration', () => { + expect(() => + createCountdownTTimer(-1000, { + stopAtZero: true, + startPaused: false, + }) + ).toThrow('Duration must be greater than zero') + }) + }) + + describe('createFreeRunTTimer', () => { + 
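+ // freeRun timers carry no duration or target; creation simply stamps the (mocked)
+ // current time into startTime, and into pauseTime as well when starting paused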
it('should create a running freeRun timer', () => { + const result = createFreeRunTTimer({ startPaused: false }) + + expect(result).toEqual({ + type: 'freeRun', + startTime: 10000, + pauseTime: null, + }) + }) + + it('should create a paused freeRun timer', () => { + const result = createFreeRunTTimer({ startPaused: true }) + + expect(result).toEqual({ + type: 'freeRun', + startTime: 10000, + pauseTime: 10000, + }) + }) + }) + + describe('calculateTTimerCurrentTime', () => { + it('should calculate time for a running timer', () => { + // Timer started at 5000, current time is 10000 + const result = calculateTTimerCurrentTime(5000, null) + + expect(result).toBe(5000) // 10000 - 5000 + }) + + it('should calculate time for a paused timer', () => { + // Timer started at 5000, paused at 8000 + const result = calculateTTimerCurrentTime(5000, 8000) + + expect(result).toBe(3000) // 8000 - 5000 + }) + + it('should handle timer that just started', () => { + const result = calculateTTimerCurrentTime(10000, null) + + expect(result).toBe(0) + }) + + it('should handle timer paused immediately', () => { + const result = calculateTTimerCurrentTime(10000, 10000) + + expect(result).toBe(0) + }) + + it('should update as time progresses', () => { + const startTime = 5000 + + expect(calculateTTimerCurrentTime(startTime, null)).toBe(5000) + + adjustFakeTime(2000) // Now at 12000 + + expect(calculateTTimerCurrentTime(startTime, null)).toBe(7000) + }) + }) + + describe('calculateNextTimeOfDayTarget', () => { + // Mock date to 2026-01-19 10:00:00 UTC for predictable tests + const MOCK_DATE = new Date('2026-01-19T10:00:00Z').getTime() + + beforeEach(() => { + jest.useFakeTimers() + jest.setSystemTime(MOCK_DATE) + }) + + afterEach(() => { + jest.useRealTimers() + }) + + it('should return number input unchanged (unix timestamp)', () => { + const timestamp = 1737331200000 // Some future timestamp + expect(calculateNextTimeOfDayTarget(timestamp)).toBe(timestamp) + }) + + it('should return null for null/undefined/empty input', () => { + expect(calculateNextTimeOfDayTarget('' as string)).toBeNull() + expect(calculateNextTimeOfDayTarget(' ')).toBeNull() + }) + + // 24-hour time formats + it('should parse 24-hour time HH:mm', () => { + const result = calculateNextTimeOfDayTarget('13:34') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T13:34:00.000Z') + }) + + it('should parse 24-hour time H:mm (single digit hour)', () => { + const result = calculateNextTimeOfDayTarget('9:05') + expect(result).not.toBeNull() + // 9:05 is in the past (before 10:00), so chrono bumps to tomorrow + expect(new Date(result!).toISOString()).toBe('2026-01-20T09:05:00.000Z') + }) + + it('should parse 24-hour time with seconds HH:mm:ss', () => { + const result = calculateNextTimeOfDayTarget('14:30:45') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T14:30:45.000Z') + }) + + // 12-hour time formats + it('should parse 12-hour time with pm', () => { + const result = calculateNextTimeOfDayTarget('5:13pm') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T17:13:00.000Z') + }) + + it('should parse 12-hour time with PM (uppercase)', () => { + const result = calculateNextTimeOfDayTarget('5:13PM') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T17:13:00.000Z') + }) + + it('should parse 12-hour time with am', () => { + const result = calculateNextTimeOfDayTarget('9:30am') + 
expect(result).not.toBeNull() + // 9:30am is in the past (before 10:00), so chrono bumps to tomorrow + expect(new Date(result!).toISOString()).toBe('2026-01-20T09:30:00.000Z') + }) + + it('should parse 12-hour time with space before am/pm', () => { + const result = calculateNextTimeOfDayTarget('3:45 pm') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:45:00.000Z') + }) + + it('should parse 12-hour time with seconds', () => { + const result = calculateNextTimeOfDayTarget('11:30:15pm') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T23:30:15.000Z') + }) + + // Date + time formats + it('should parse date with time (slash separator)', () => { + const result = calculateNextTimeOfDayTarget('1/19/2026 15:43') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:43:00.000Z') + }) + + it('should parse date with time and seconds', () => { + const result = calculateNextTimeOfDayTarget('1/19/2026 15:43:30') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:43:30.000Z') + }) + + it('should parse date with 12-hour time', () => { + const result = calculateNextTimeOfDayTarget('1/19/2026 3:43pm') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:43:00.000Z') + }) + + // ISO 8601 format + it('should parse ISO 8601 format', () => { + const result = calculateNextTimeOfDayTarget('2026-01-19T15:43:00') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:43:00.000Z') + }) + + it('should parse ISO 8601 with timezone', () => { + const result = calculateNextTimeOfDayTarget('2026-01-19T15:43:00+01:00') + expect(result).not.toBeNull() + // +01:00 means the time is 1 hour ahead of UTC, so 15:43 +01:00 = 14:43 UTC + expect(new Date(result!).toISOString()).toBe('2026-01-19T14:43:00.000Z') + }) + + // Natural language formats (chrono-node strength) + it('should parse natural language date', () => { + const result = calculateNextTimeOfDayTarget('January 19, 2026 at 3:30pm') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T15:30:00.000Z') + }) + + it('should parse "noon"', () => { + const result = calculateNextTimeOfDayTarget('noon') + expect(result).not.toBeNull() + expect(new Date(result!).toISOString()).toBe('2026-01-19T12:00:00.000Z') + }) + + it('should parse "midnight"', () => { + const result = calculateNextTimeOfDayTarget('midnight') + expect(result).not.toBeNull() + // Midnight is in the past (before 10:00), so chrono bumps to tomorrow + expect(new Date(result!).toISOString()).toBe('2026-01-20T00:00:00.000Z') + }) + + // Edge cases + it('should return null for invalid time string', () => { + expect(calculateNextTimeOfDayTarget('not a time')).toBeNull() + }) + + it('should return null for gibberish', () => { + expect(calculateNextTimeOfDayTarget('asdfghjkl')).toBeNull() + }) + }) + + describe('createTimeOfDayTTimer', () => { + // Mock date to 2026-01-19 10:00:00 UTC for predictable tests + const MOCK_DATE = new Date('2026-01-19T10:00:00Z').getTime() + + beforeEach(() => { + jest.useFakeTimers() + jest.setSystemTime(MOCK_DATE) + }) + + afterEach(() => { + jest.useRealTimers() + }) + + it('should create a timeOfDay timer with valid time string', () => { + const result = createTimeOfDayTTimer('15:30', { stopAtZero: true }) + + expect(result).toEqual({ + type: 'timeOfDay', + stopAtZero: true, + targetTime: 
expect.any(Number), // new target time + targetRaw: '15:30', + }) + }) + + it('should create a timeOfDay timer with numeric timestamp', () => { + const timestamp = 1737331200000 + const result = createTimeOfDayTTimer(timestamp, { stopAtZero: false }) + + expect(result).toEqual({ + type: 'timeOfDay', + targetTime: timestamp, + targetRaw: timestamp, + stopAtZero: false, + }) + }) + + it('should throw for invalid time string', () => { + expect(() => createTimeOfDayTTimer('invalid', { stopAtZero: true })).toThrow( + 'Unable to parse target time for timeOfDay T-timer' + ) + }) + + it('should throw for empty string', () => { + expect(() => createTimeOfDayTTimer('', { stopAtZero: true })).toThrow( + 'Unable to parse target time for timeOfDay T-timer' + ) + }) + }) + + describe('restartTTimer with timeOfDay', () => { + // Mock date to 2026-01-19 10:00:00 UTC for predictable tests + const MOCK_DATE = new Date('2026-01-19T10:00:00Z').getTime() + + beforeEach(() => { + jest.useFakeTimers() + jest.setSystemTime(MOCK_DATE) + }) + + afterEach(() => { + jest.useRealTimers() + }) + + it('should restart a timeOfDay timer with valid targetRaw', () => { + const timer: RundownTTimerMode = { + type: 'timeOfDay', + targetTime: 1737300000000, + targetRaw: '15:30', + stopAtZero: true, + } + + const result = restartTTimer(timer) + + expect(result).toEqual({ + ...timer, + targetTime: expect.any(Number), // new target time + }) + expect((result as RundownTTimerModeTimeOfDay).targetTime).toBeGreaterThan(timer.targetTime) + }) + + it('should return null for timeOfDay timer with invalid targetRaw', () => { + const timer: RundownTTimerMode = { + type: 'timeOfDay', + targetTime: 1737300000000, + targetRaw: 'invalid', + stopAtZero: true, + } + + const result = restartTTimer(timer) + + expect(result).toBeNull() + }) + + it('should return null for timeOfDay timer with unix timestamp', () => { + const timer: RundownTTimerMode = { + type: 'timeOfDay', + targetTime: 1737300000000, + targetRaw: 1737300000000, + stopAtZero: true, + } + + const result = restartTTimer(timer) + + expect(result).toBeNull() + }) + }) +}) diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts index 36eaa59a10..cc9c338241 100644 --- a/packages/job-worker/src/playout/adlibAction.ts +++ b/packages/job-worker/src/playout/adlibAction.ts @@ -76,17 +76,6 @@ export async function executeAdlibActionAndSaveModel( throw UserError.create(UserErrorMessage.ActionsNotSupported) } - const watchedPackages = await WatchedPackagesHelper.create(context, { - pieceId: data.actionDocId, - fromPieceType: { - $in: [ - ExpectedPackageDBType.ADLIB_ACTION, - ExpectedPackageDBType.BASELINE_ADLIB_ACTION, - ExpectedPackageDBType.BUCKET_ADLIB_ACTION, - ], - }, - }) - const [adLibAction, baselineAdLibAction, bucketAdLibAction] = await Promise.all([ context.directCollections.AdLibActions.findOne(data.actionDocId as AdLibActionId, { projection: { _id: 1, privateData: 1 }, @@ -103,6 +92,21 @@ export async function executeAdlibActionAndSaveModel( ]) const adLibActionDoc = adLibAction ?? baselineAdLibAction ?? 
bucketAdLibAction + let watchedPackages = WatchedPackagesHelper.empty(context) + if (adLibActionDoc && 'rundownId' in adLibActionDoc) { + watchedPackages = await WatchedPackagesHelper.create(context, adLibActionDoc.rundownId, null, { + fromPieceType: { + $in: [ExpectedPackageDBType.ADLIB_ACTION, ExpectedPackageDBType.BASELINE_ADLIB_ACTION], + }, + pieceId: data.actionDocId, + }) + } else if (adLibActionDoc && 'bucketId' in adLibActionDoc) { + watchedPackages = await WatchedPackagesHelper.create(context, null, adLibActionDoc.bucketId, { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, + pieceId: data.actionDocId, + }) + } + + const actionParameters: ExecuteActionParameters = { actionId: data.actionId, userData: data.userData, diff --git a/packages/job-worker/src/playout/expectedPackages.ts b/packages/job-worker/src/playout/expectedPackages.ts new file mode 100644 index 0000000000..0fc07f795d --- /dev/null +++ b/packages/job-worker/src/playout/expectedPackages.ts @@ -0,0 +1,133 @@ +import type { CleanupOrphanedExpectedPackageReferencesProps } from '@sofie-automation/corelib/dist/worker/studio' +import type { JobContext } from '../jobs/index.js' +import { runWithPlaylistLock } from './lock.js' +import { + ExpectedPackageDB, + isPackageReferencedByPlayout, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { AnyBulkWriteOperation } from 'mongodb' +import { ExpectedPackageId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' + +export async function handleCleanupOrphanedExpectedPackageReferences( + context: JobContext, + data: CleanupOrphanedExpectedPackageReferencesProps +): Promise<void> { + // Something has changed in the PieceInstances; we need to check that the ExpectedPackages have only valid PieceInstances as owners, and remove any which no longer have owners + + await runWithPlaylistLock(context, data.playlistId, async () => { + const [existingPackages, validPieceInstances] = await Promise.all([ + context.directCollections.ExpectedPackages.findFetch( + { + studioId: context.studioId, + rundownId: data.rundownId, + bucketId: null, + }, + { + projection: { + _id: 1, + playoutSources: 1, + // We only need to know if there are any entries, so project them to be as minimal as possible + 'ingestSources.fromPieceType': 1, + }, + } + ) as Promise< + Array< + Pick<ExpectedPackageDB, '_id' | 'playoutSources'> & { + ingestSources: unknown[] + } + > + >, + context.directCollections.PieceInstances.findFetch( + { + rundownId: data.rundownId, + reset: { $ne: true }, + }, + { + projection: { + _id: 1, + neededExpectedPackageIds: 1, + }, + } + ) as Promise<Array<Pick<PieceInstance, '_id' | 'neededExpectedPackageIds'>>>, + ]) + + const pieceInstancePackageMap = new Map<PieceInstanceId, Set<ExpectedPackageId>>() + for (const pieceInstance of validPieceInstances) { + if (pieceInstance.neededExpectedPackageIds && pieceInstance.neededExpectedPackageIds.length > 0) + pieceInstancePackageMap.set(pieceInstance._id, new Set(pieceInstance.neededExpectedPackageIds)) + } + + const writeOps: AnyBulkWriteOperation<ExpectedPackageDB>[] = [] + + for (const expectedPackage of existingPackages) { + // Find the pieceInstanceIds that are stale + const pieceInstanceIdsToRemove: PieceInstanceId[] = [] + for (const pieceInstanceId of expectedPackage.playoutSources.pieceInstanceIds) { + const pieceInstancePackageIds = pieceInstancePackageMap.get(pieceInstanceId) + if (!pieceInstancePackageIds || !pieceInstancePackageIds.has(expectedPackage._id)) { + // This pieceInstanceId is no longer valid, queue it to be removed +
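+ // (either the PieceInstance no longer exists or has been reset, or its neededExpectedPackageIds no longer includes this package)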
pieceInstanceIdsToRemove.push(pieceInstanceId) + } + } + + // Queue the write + if (pieceInstanceIdsToRemove.length === expectedPackage.playoutSources.pieceInstanceIds.length) { + // It looks like all the pieceInstanceIds are being removed + + if ( + expectedPackage.ingestSources.length === 0 && + !isPackageReferencedByPlayout({ + // Test with a fake package + ...expectedPackage, + playoutSources: { + ...expectedPackage.playoutSources, + pieceInstanceIds: [], + }, + }) + ) { + // This package is not referenced by anything, so we can delete it + writeOps.push({ + deleteOne: { + filter: { + _id: expectedPackage._id, + }, + }, + }) + } else { + // This package is still referenced by something, so we need to keep it + writeOps.push({ + updateOne: { + filter: { + _id: expectedPackage._id, + }, + update: { + $set: { + 'playoutSources.pieceInstanceIds': [], + }, + }, + }, + }) + } + } else if (pieceInstanceIdsToRemove.length > 0) { + // Some of the pieceInstanceIds are being removed + writeOps.push({ + updateOne: { + filter: { + _id: expectedPackage._id, + }, + update: { + $pull: { + 'playoutSources.pieceInstanceIds': { $in: pieceInstanceIdsToRemove }, + }, + }, + }, + }) + } + } + + if (writeOps.length > 0) { + await context.directCollections.ExpectedPackages.bulkWrite(writeOps) + } + }) +} diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index 0dff06ff91..439d58b895 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -17,6 +17,7 @@ import { DBRundownPlaylist, QuickLoopMarker, RundownHoldState, + RundownTTimer, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ReadonlyDeep } from 'type-fest' import { StudioPlayoutModelBase, StudioPlayoutModelBaseReadonly } from '../../studio/model/StudioPlayoutModel.js' @@ -374,6 +375,12 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa */ setQuickLoopMarker(type: 'start' | 'end', marker: QuickLoopMarker | null): void + /** + * Update a T-timer + * @param timer Timer properties + */ + updateTTimer(timer: RundownTTimer): void + calculatePartTimings( fromPartInstance: PlayoutPartInstanceModel | null, toPartInstance: PlayoutPartInstanceModel, diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 76086bd98c..a593fad9c6 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -16,6 +16,7 @@ import { DBRundownPlaylist, QuickLoopMarker, RundownHoldState, + RundownTTimer, SelectedPartInstance, } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { ReadonlyDeep } from 'type-fest' @@ -37,7 +38,13 @@ import _ from 'underscore' import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { PlaylistLock } from '../../../jobs/lock.js' import { logger } from '../../../logging.js' -import { clone, getRandomId, literal, normalizeArrayToMapFunc } from '@sofie-automation/corelib/dist/lib' +import { + clone, + getRandomId, + groupByToMapFunc, + literal, + normalizeArrayToMapFunc, +} from '@sofie-automation/corelib/dist/lib' import { sleep } from '@sofie-automation/shared-lib/dist/lib/lib' import { sortRundownIDsInPlaylist } from '@sofie-automation/corelib/dist/playout/playlist' import { PlayoutRundownModel } 
from '../PlayoutRundownModel.js' @@ -50,10 +57,13 @@ import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedSt import { queuePartInstanceTimingEvent } from '../../timings/events.js' import { IS_PRODUCTION } from '../../../environment.js' import { DeferredAfterSaveFunction, DeferredFunction, PlayoutModel, PlayoutModelReadonly } from '../PlayoutModel.js' -import { writePartInstancesAndPieceInstances, writeAdlibTestingSegments } from './SavePlayoutModel.js' +import { + writePartInstancesAndPieceInstances, + writeAdlibTestingSegments, + writeExpectedPackagesForPlayoutSources, +} from './SavePlayoutModel.js' import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js' import { DatabasePersistedModel } from '../../../modelBase.js' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { StudioBaselineHelper } from '../../../studio/model/StudioBaselineHelper.js' import { QuickLoopService } from '../services/QuickLoopService.js' @@ -61,6 +71,8 @@ import { calculatePartTimings, PartCalculatedTimings } from '@sofie-automation/c import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js' import { getExpectedLatency } from '@sofie-automation/corelib/dist/studio/playout' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import { validateTTimerIndex } from '../../tTimers.js' export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly { public readonly playlistId: RundownPlaylistId @@ -696,12 +708,20 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou } this.#timelineHasChanged = false + const partInstancesByRundownId = groupByToMapFunc( + Array.from(this.allPartInstances.values()).filter((p) => !!p), + (p) => p.partInstance.rundownId + ) + await Promise.all([ this.#playlistHasChanged ? 
this.context.directCollections.RundownPlaylists.replace(this.playlistImpl) : undefined, + ...writePartInstancesAndPieceInstances(this.context, this.allPartInstances), writeAdlibTestingSegments(this.context, this.rundownsImpl), + ...Array.from(partInstancesByRundownId.entries()).map(async ([rundownId, partInstances]) => + writeExpectedPackagesForPlayoutSources(this.context, this.playlistId, rundownId, partInstances) + ), this.#baselineHelper.saveAllToDatabase(), this.#notificationsHelper.saveAllToDatabase(), this.context.saveRouteSetChanges(), @@ -841,7 +861,7 @@ return this.timelineImpl } - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void { this.#baselineHelper.setExpectedPackages(packages) } setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void { @@ -859,6 +879,13 @@ this.#playlistHasChanged = true } + updateTTimer(timer: RundownTTimer): void { + validateTTimerIndex(timer.index) + + this.playlistImpl.tTimers[timer.index - 1] = timer + this.#playlistHasChanged = true + } + #lastMonotonicNowInPlayout = getCurrentTime() getNowInPlayout(): number { const nowOffsetLatency = this.getNowOffsetLatency() ?? 0 diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts index 4f10a8ec79..6c1f5a9588 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutPieceInstanceModelImpl.ts @@ -1,10 +1,11 @@ -import { PieceInstanceInfiniteId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { ExpectedPackageId, PieceInstanceInfiniteId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { clone, getRandomId } from '@sofie-automation/corelib/dist/lib' -import { Time } from '@sofie-automation/blueprints-integration' +import { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration' import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js' import _ from 'underscore' +import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel { /** @@ -13,6 +14,8 @@ */ PieceInstanceImpl: PieceInstance + updatedExpectedPackages: Map<ExpectedPackageId, ReadonlyDeep<ExpectedPackage.Base>> | null + /** * Set/delete a value for this PieceInstance, and track that there are changes * @param key Property key @@ -26,6 +29,16 @@ } this.#hasChanges = true + + // Updating the 'piece' has side effects on the expectedPackages + if (key === 'piece') { + const newPiece = newValue as PieceInstance['piece'] | undefined + this.updatedExpectedPackages = createExpectedPackagesMap( + this.PieceInstanceImpl.rundownId, + newPiece?.expectedPackages + ) + this.PieceInstanceImpl.neededExpectedPackageIds = Array.from(this.updatedExpectedPackages.keys()) + } } /** @@ -57,7 +70,7 @@
export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel * Whether this PieceInstance has unsaved changes */ get HasChanges(): boolean { - return this.#hasChanges + return this.#hasChanges || !!this.updatedExpectedPackages } /** @@ -71,9 +84,19 @@ export class PlayoutPieceInstanceModelImpl implements PlayoutPieceInstanceModel return this.PieceInstanceImpl } - constructor(pieceInstances: PieceInstance, hasChanges: boolean) { - this.PieceInstanceImpl = pieceInstances + constructor(pieceInstance: PieceInstance, hasChanges: boolean) { + this.PieceInstanceImpl = pieceInstance this.#hasChanges = hasChanges + + if (hasChanges) { + this.updatedExpectedPackages = createExpectedPackagesMap( + pieceInstance.rundownId, + pieceInstance.piece.expectedPackages + ) + this.PieceInstanceImpl.neededExpectedPackageIds = Array.from(this.updatedExpectedPackages.keys()) + } else { + this.updatedExpectedPackages = null + } } /** @@ -137,3 +160,16 @@ ) } } + +function createExpectedPackagesMap( + rundownId: RundownId, + packages: ExpectedPackage.Base[] | undefined +): Map<ExpectedPackageId, ReadonlyDeep<ExpectedPackage.Base>> { + const map = new Map<ExpectedPackageId, ReadonlyDeep<ExpectedPackage.Base>>() + if (!packages) return map + + for (const pkg of packages) { + map.set(getExpectedPackageId(rundownId, pkg), pkg) + } + return map +} diff --git a/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts b/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts index e5e218c7b8..e43d55348c 100644 --- a/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts +++ b/packages/job-worker/src/playout/model/implementation/SavePlayoutModel.ts @@ -1,4 +1,10 @@ -import { PartInstanceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + ExpectedPackageId, + PartInstanceId, + PieceInstanceId, + RundownId, + RundownPlaylistId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -7,6 +13,11 @@ import { AnyBulkWriteOperation } from 'mongodb' import { JobContext } from '../../../jobs/index.js' import { PlayoutPartInstanceModelImpl } from './PlayoutPartInstanceModelImpl.js' import { PlayoutRundownModelImpl } from './PlayoutRundownModelImpl.js' +import { ReadonlyDeep } from 'type-fest' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import { normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' +import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' /** * Save any changed AdlibTesting Segments @@ -136,3 +147,143 @@ : Promise.resolve(), ] } + +interface ExpectedPackageEntry { + _id: ExpectedPackageId + package: ReadonlyDeep<ExpectedPackage.Base> + + pieceInstanceIds: PieceInstanceId[] +} + +export async function writeExpectedPackagesForPlayoutSources( + context: JobContext, + playlistId: RundownPlaylistId, + rundownId: RundownId, + partInstancesForRundown: PlayoutPartInstanceModelImpl[] +): Promise<void> { + // We know we are inside the playout lock, so we can safely load from the packages and it won't be modified by another thread + + const existingPackages = (await
context.directCollections.ExpectedPackages.findFetch( + { + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + }, + { + projection: { + _id: 1, + playoutSources: 1, + }, + } + )) as Pick<ExpectedPackageDB, '_id' | 'playoutSources'>[] + const existingPackagesMap = normalizeArrayToMap(existingPackages, '_id') + + const pieceInstancesToAddToPackages = new Map<ExpectedPackageId, PieceInstanceId[]>() + const packagesToInsert = new Map<ExpectedPackageId, ExpectedPackageEntry>() + + let hasPieceInstanceExpectedPackageChanges = false + + for (const partInstance of partInstancesForRundown) { + if (!partInstance) continue + + for (const pieceInstance of partInstance.pieceInstancesImpl.values()) { + if (!pieceInstance) { + // PieceInstance was deleted, cleanup may be needed + hasPieceInstanceExpectedPackageChanges = true + continue + } + + // The expectedPackages of the PieceInstance have not been modified, so there is nothing to do + if (!pieceInstance.updatedExpectedPackages) continue + + hasPieceInstanceExpectedPackageChanges = true + + // Any removed references will be removed by the debounced job + + for (const [packageId, expectedPackage] of pieceInstance.updatedExpectedPackages) { + const existingPackage = existingPackagesMap.get(packageId) + if (existingPackage?.playoutSources.pieceInstanceIds.includes(pieceInstance.pieceInstance._id)) { + // Reference already exists, nothing to do + continue + } + + if (existingPackage) { + // Add the pieceInstanceId to the existing package + const pieceInstanceIds = pieceInstancesToAddToPackages.get(packageId) ?? [] + pieceInstanceIds.push(pieceInstance.pieceInstance._id) + pieceInstancesToAddToPackages.set(packageId, pieceInstanceIds) + } else { + // Record as needing a new document, or add to existing entry if already queued for insert + const existingEntry = packagesToInsert.get(packageId) + if (existingEntry) { + existingEntry.pieceInstanceIds.push(pieceInstance.pieceInstance._id) + } else { + packagesToInsert.set(packageId, { + _id: packageId, + package: expectedPackage, + pieceInstanceIds: [pieceInstance.pieceInstance._id], + }) + } + + // Future: If this came from a bucket, can we copy the packageInfos across to minimise latency until the status is ready?
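+ // Note: package ids are content-derived (getExpectedPackageId), so another PieceInstance referencing an identical package will extend the entry queued above instead of creating a duplicate document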
+ } + } + } + } + + // We now know what needs to be written (only the additive changes) + + const writeOps: AnyBulkWriteOperation<ExpectedPackageDB>[] = [] + for (const [packageId, pieceInstanceIds] of pieceInstancesToAddToPackages.entries()) { + writeOps.push({ + updateOne: { + filter: { _id: packageId }, + update: { + $addToSet: { + 'playoutSources.pieceInstanceIds': { $each: pieceInstanceIds }, + }, + }, + }, + }) + } + + for (const packageEntry of packagesToInsert.values()) { + writeOps.push({ + insertOne: { + document: { + _id: packageEntry._id, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + created: Date.now(), + + package: packageEntry.package, + ingestSources: [], + playoutSources: { + pieceInstanceIds: packageEntry.pieceInstanceIds, + }, + }, + }, + }) + } + + if (writeOps.length > 0) { + await context.directCollections.ExpectedPackages.bulkWrite(writeOps) + } + + // We can't easily track any references which have been deleted, so we should schedule a cleanup job to deal with that for us + // Only queue if there were changes to expected packages, to avoid unnecessary job scheduling + if (hasPieceInstanceExpectedPackageChanges) { + await context.queueStudioJob( + StudioJobs.CleanupOrphanedExpectedPackageReferences, + { + playlistId: playlistId, + rundownId: rundownId, + }, + { + lowPriority: true, + debounce: 1000, + } + ) + } +} diff --git a/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts b/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts index 94eb084684..497ab26123 100644 --- a/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts +++ b/packages/job-worker/src/playout/model/implementation/__tests__/SavePlayoutModel.spec.ts @@ -4,12 +4,23 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { PlayoutRundownModelImpl } from '../PlayoutRundownModelImpl.js' import { setupDefaultJobEnvironment } from '../../../../__mocks__/context.js' -import { writePartInstancesAndPieceInstances, writeAdlibTestingSegments } from '../SavePlayoutModel.js' +import { + writePartInstancesAndPieceInstances, + writeAdlibTestingSegments, + writeExpectedPackagesForPlayoutSources, +} from '../SavePlayoutModel.js' import { PlayoutPartInstanceModelImpl } from '../PlayoutPartInstanceModelImpl.js' -import { PartInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + PartInstanceId, + PieceInstanceId, + RundownId, + RundownPlaylistId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { mock } from 'jest-mock-extended' import { QuickLoopService } from '../../services/QuickLoopService.js' +import { ExpectedPackageDB, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' describe('SavePlayoutModel', () => { function createRundownModel(segments?: DBSegment[]): PlayoutRundownModelImpl { @@ -411,4 +422,264 @@ `) }) }) + + describe('writeExpectedPackagesForPlayoutSources', () => { + const rundownId = protectString('rundown0') + const playlistId = protectString('playlist0') + + function createMockExpectedPackage(id: string): ExpectedPackage.ExpectedPackageMediaFile { + return { + _id: id, + type:
ExpectedPackage.PackageType.MEDIA_FILE, + layers: ['layer0'], + content: { filePath: `/media/${id}.mp4` }, + version: {}, + contentVersionHash: `hash_${id}`, + sources: [], + sideEffect: {}, + } + } + + function createPartInstanceWithPieceInstances( + partInstanceId: string, + pieceInstances: PieceInstance[], + hasExpectedPackageChanges: boolean + ): PlayoutPartInstanceModelImpl { + const partInstanceModel = new PlayoutPartInstanceModelImpl( + { _id: partInstanceId, rundownId } as any, + pieceInstances, + hasExpectedPackageChanges, + mock<QuickLoopService>() + ) + return partInstanceModel + } + + function createPieceInstanceWithExpectedPackages( + pieceInstanceId: string, + expectedPackages: ExpectedPackage.Base[] + ): PieceInstance { + return { + _id: protectString(pieceInstanceId), + rundownId: rundownId, + partInstanceId: protectString('partInstance0'), + piece: { + _id: protectString(`piece_${pieceInstanceId}`), + expectedPackages, + }, + } as unknown as PieceInstance + } + + it('no PartInstances', async () => { + const context = setupDefaultJobEnvironment() + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, []) + + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + }) + + it('PieceInstance with no expected package changes', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg]) + // hasExpectedPackageChanges = false means no updatedExpectedPackages will be set + const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], false) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + }) + + it('inserts new ExpectedPackage when PieceInstance has expected packages', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg]) + // hasExpectedPackageChanges = true sets up updatedExpectedPackages + const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + // Should have findFetch, bulkWrite, and insertOne (bulkWrite logs itself then calls insertOne which also logs) + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // 1 operation + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + + // Verify the inserted package has correct structure + const insertedPackageId = context.mockCollections.ExpectedPackages.operations[2].args[0] + const insertedPackage = await context.directCollections.ExpectedPackages.findOne(insertedPackageId) + expect(insertedPackage).toMatchObject({ + _id: insertedPackageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, +
package: expectedPkg, + ingestSources: [], + playoutSources: { + pieceInstanceIds: [protectString('pieceInstance0')], + }, + } satisfies Omit<ExpectedPackageDB, 'created'>) + expect(insertedPackage?.created).toBeGreaterThan(0) + }) + + it('does not add pieceInstanceId if reference already exists in package', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('pkg0') + const packageId = getExpectedPackageId(rundownId, expectedPkg) + const pieceInstanceId = protectString('pieceInstance0') + + // Pre-populate with package that already has this pieceInstanceId + const existingPackage: ExpectedPackageDB = { + _id: packageId, + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + created: Date.now(), + package: expectedPkg, + ingestSources: [], + playoutSources: { + pieceInstanceIds: [pieceInstanceId], + }, + } + await context.directCollections.ExpectedPackages.insertOne(existingPackage) + context.mockCollections.ExpectedPackages.clearOpLog() + + const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [expectedPkg]) + const partInstance = createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + // Should only have findFetch, no bulkWrite since reference already exists + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + }) + + it('handles multiple PieceInstances with different packages', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg1 = createMockExpectedPackage('pkg1') + const expectedPkg2 = createMockExpectedPackage('pkg2') + + const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg1]) + const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg2]) + + const partInstance = createPartInstanceWithPieceInstances( + 'partInstance0', + [pieceInstance1, pieceInstance2], + true + ) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + // Should have findFetch, bulkWrite, and 2 insertOne ops + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(4) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(2) // 2 operations + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne') + }) + + it('handles multiple PieceInstances referencing the same package', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg = createMockExpectedPackage('sharedPkg') + + const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg]) + const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg]) + + const partInstance = createPartInstanceWithPieceInstances( + 'partInstance0', + [pieceInstance1, pieceInstance2], + true + ) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + // Should have findFetch, bulkWrite, and insertOne +
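+ // (both PieceInstances resolve to the same content-derived package id, so a single document is inserted carrying both pieceInstanceIds)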
expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(3) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(1) // Only 1 insert for the shared package + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + + // Verify the package has both pieceInstanceIds + const insertedPackageId = context.mockCollections.ExpectedPackages.operations[2].args[0] + const insertedPackage = await context.directCollections.ExpectedPackages.findOne(insertedPackageId) + expect(insertedPackage?.playoutSources.pieceInstanceIds).toHaveLength(2) + expect(insertedPackage?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance1') + ) + expect(insertedPackage?.playoutSources.pieceInstanceIds).toContain( + protectString('pieceInstance2') + ) + }) + + it('handles multiple PartInstances', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg1 = createMockExpectedPackage('pkg1') + const expectedPkg2 = createMockExpectedPackage('pkg2') + + const pieceInstance1 = createPieceInstanceWithExpectedPackages('pieceInstance1', [expectedPkg1]) + const pieceInstance2 = createPieceInstanceWithExpectedPackages('pieceInstance2', [expectedPkg2]) + + const partInstance1 = createPartInstanceWithPieceInstances('partInstance1', [pieceInstance1], true) + const partInstance2 = createPartInstanceWithPieceInstances('partInstance2', [pieceInstance2], true) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance1, partInstance2]) + + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(4) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(2) // 2 insert operations + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne') + }) + + it('handles deleted PieceInstance triggering cleanup job', async () => { + const context = setupDefaultJobEnvironment() + + const partInstance = createPartInstanceWithPieceInstances('partInstance0', [], false) + // Simulate a deleted pieceInstance by setting null in the map + partInstance.pieceInstancesImpl.set(protectString('deletedPiece'), null) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + // No writes expected since there are no packages to insert/update + // But the cleanup job should still be queued (which is handled silently in mock) + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(1) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + }) + + it('handles PieceInstance with multiple expected packages', async () => { + const context = setupDefaultJobEnvironment() + + const expectedPkg1 = createMockExpectedPackage('pkg1') + const expectedPkg2 = createMockExpectedPackage('pkg2') + const expectedPkg3 = createMockExpectedPackage('pkg3') + + const pieceInstance = createPieceInstanceWithExpectedPackages('pieceInstance0', [ + expectedPkg1, + expectedPkg2, + expectedPkg3, + ]) + const partInstance = 
createPartInstanceWithPieceInstances('partInstance0', [pieceInstance], true) + + await writeExpectedPackagesForPlayoutSources(context, playlistId, rundownId, [partInstance]) + + expect(context.mockCollections.ExpectedPackages.operations).toHaveLength(5) + expect(context.mockCollections.ExpectedPackages.operations[0].type).toBe('findFetch') + expect(context.mockCollections.ExpectedPackages.operations[1].type).toBe('bulkWrite') + expect(context.mockCollections.ExpectedPackages.operations[1].args[0]).toBe(3) // 3 insert operations + expect(context.mockCollections.ExpectedPackages.operations[2].type).toBe('insertOne') + expect(context.mockCollections.ExpectedPackages.operations[3].type).toBe('insertOne') + expect(context.mockCollections.ExpectedPackages.operations[4].type).toBe('insertOne') + }) + }) }) diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts index c173f2cb29..95d9cb3b4d 100644 --- a/packages/job-worker/src/playout/snapshot.ts +++ b/packages/job-worker/src/playout/snapshot.ts @@ -1,4 +1,9 @@ -import { ExpectedPackageDBType, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { + ExpectedPackageDB, + ExpectedPackageDBType, + ExpectedPackageIngestSource, + getExpectedPackageId, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { AdLibActionId, ExpectedPackageId, @@ -25,12 +30,14 @@ import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snapshots' import { unprotectString, ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' import { saveIntoDb } from '../db/changes.js' import { getPartId, getSegmentId } from '../ingest/lib.js' -import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' +import { assertNever, getHash, getRandomId, literal, omit } from '@sofie-automation/corelib/dist/lib' import { logger } from '../logging.js' import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' class IdMapWithGenerator<V extends ProtectedString<any>> extends Map<V, V> { getOrGenerate(key: V): V { @@ -243,7 +250,7 @@ export async function handleRestorePlaylistSnapshot( } // List any ids that need updating on other documents - const rundownIdMap = new Map<RundownId, RundownId>() + const rundownIdMap = new IdMapWithGenerator<RundownId>() const getNewRundownId = (oldRundownId: RundownId) => { const rundownId = rundownIdMap.get(oldRundownId) if (!rundownId) { @@ -340,47 +347,219 @@ ) const expectedPackageIdMap = new Map<ExpectedPackageId, ExpectedPackageId>() - for (const expectedPackage of snapshot.expectedPackages) { - const oldId = expectedPackage._id - - switch (expectedPackage.fromPieceType) { - case ExpectedPackageDBType.PIECE: - case ExpectedPackageDBType.ADLIB_PIECE: - case ExpectedPackageDBType.ADLIB_ACTION: - case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: - case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: - case ExpectedPackageDBType.BASELINE_PIECE: { - expectedPackage.pieceId = pieceIdMap.getOrGenerateAndWarn( - expectedPackage.pieceId,
`expectedPackage.pieceId=${expectedPackage.pieceId}` - ) - - expectedPackage._id = getExpectedPackageId(expectedPackage.pieceId, expectedPackage.blueprintPackageId) - - break + snapshot.expectedPackages = snapshot.expectedPackages.map((expectedPackage0): ExpectedPackageDB => { + if ('fromPieceType' in expectedPackage0) { + const expectedPackage = expectedPackage0 as unknown as PackagesPreR53.ExpectedPackageDB + + let source: ExpectedPackageIngestSource | undefined + + switch (expectedPackage.fromPieceType) { + case PackagesPreR53.ExpectedPackageDBType.PIECE: + case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE: + case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION: + source = { + fromPieceType: expectedPackage.fromPieceType, + pieceId: pieceIdMap.getOrGenerateAndWarn( + expectedPackage.pieceId, + `expectedPackage.pieceId=${expectedPackage.pieceId}` + ) as any, + partId: partIdMap.getOrGenerateAndWarn( + expectedPackage.partId, + `expectedPackage.partId=${expectedPackage.partId}` + ), + segmentId: segmentIdMap.getOrGenerateAndWarn( + expectedPackage.segmentId, + `expectedPackage.segmentId=${expectedPackage.segmentId}` + ), + blueprintPackageId: expectedPackage.blueprintPackageId, + listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates, + } + + break + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { + source = { + fromPieceType: expectedPackage.fromPieceType, + pieceId: pieceIdMap.getOrGenerateAndWarn( + expectedPackage.pieceId, + `expectedPackage.pieceId=${expectedPackage.pieceId}` + ) as any, + blueprintPackageId: expectedPackage.blueprintPackageId, + listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates, + } + + break + } + + case PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { + source = { + fromPieceType: expectedPackage.fromPieceType, + blueprintPackageId: expectedPackage.blueprintPackageId, + listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates, + } + break + } + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB: + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { + // ignore, these are not present in the rundown snapshot anyway. + logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + break + } + + default: + assertNever(expectedPackage) + break + } + + if (!source) { + logger.warn(`Failed to fixup ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + // Define a fake source, so that it gets imported. + source = { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('fakePiece'), + partId: protectString('fakePart'), + segmentId: protectString('fakeSegment'), + blueprintPackageId: expectedPackage.blueprintPackageId, + listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates, + } + } + + const packageRundownId: RundownId | null = + 'rundownId' in expectedPackage + ? rundownIdMap.getOrGenerateAndWarn( + expectedPackage.rundownId, + `expectedPackage.rundownId=${expectedPackage.rundownId}` + ) + : null + + // Generate a unique id for the package. 
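+ // It combines the owning rundown (or studio) id, the old owner document id and a hash of the blueprint-assigned package id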
+ // This is done differently to ensure we don't have id collisions that the documents aren't expecting + // Note: maybe this should do the work to generate in the new deduplicated form, but that likely has no benefit + let packageOwnerId: string + const ownerPieceType = source.fromPieceType + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + packageOwnerId = unprotectString(source.pieceId) + break + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + packageOwnerId = 'rundownBaselineObjects' + break + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + packageOwnerId = 'studioBaseline' + break + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + packageOwnerId = unprotectString(source.pieceId) + break + + default: + assertNever(source) + throw new Error(`Unknown fromPieceType "${ownerPieceType}"`) } - case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { - expectedPackage._id = getExpectedPackageId( - expectedPackage.rundownId, + const newPackageId = protectString( + `${packageRundownId || context.studioId}_${packageOwnerId}_${getHash( expectedPackage.blueprintPackageId - ) - break + )}` + ) + + const newExpectedPackage: ExpectedPackageDB = { + _id: newPackageId, + studioId: context.studioId, + rundownId: packageRundownId, + bucketId: null, + created: expectedPackage.created, + package: { + ...(omit( + expectedPackage, + '_id', + 'studioId', + 'fromPieceType', + 'blueprintPackageId', + 'contentVersionHash', + // @ts-expect-error only sometimes present + 'rundownId', + 'pieceId', + 'partId', + 'segmentId', + 'pieceExternalId' + ) as ExpectedPackage.Any), + _id: expectedPackage.blueprintPackageId, + }, + + ingestSources: [source], + playoutSources: { + pieceInstanceIds: [], + }, } - case ExpectedPackageDBType.BUCKET_ADLIB: - case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: - case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { - // ignore, these are not present in the rundown snapshot anyway.
- logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) - break + + expectedPackageIdMap.set(expectedPackage._id, newExpectedPackage._id) + return newExpectedPackage + } else { + const expectedPackage = expectedPackage0 + const oldId = expectedPackage._id + + for (const source of expectedPackage.ingestSources) { + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + source.pieceId = pieceIdMap.getOrGenerateAndWarn( + source.pieceId, + `expectedPackage.pieceId=${source.pieceId}` + ) as any + source.partId = partIdMap.getOrGenerateAndWarn( + source.partId, + `expectedPackage.partId=${source.partId}` + ) + source.segmentId = segmentIdMap.getOrGenerateAndWarn( + source.segmentId, + `expectedPackage.segmentId=${source.segmentId}` + ) + + break + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { + source.pieceId = pieceIdMap.getOrGenerateAndWarn( + source.pieceId, + `expectedPackage.pieceId=${source.pieceId}` + ) as any + + break + } + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { + // No properties to update + break + } + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { + // ignore, these are not present in the rundown snapshot anyway. + logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + break + } + default: + assertNever(source) + break + } } - default: - assertNever(expectedPackage) - break - } + // Regenerate the ID from the new rundownId and packageId + expectedPackage._id = getExpectedPackageId( + expectedPackage.rundownId || expectedPackage.studioId, + expectedPackage.package + ) - expectedPackageIdMap.set(oldId, expectedPackage._id) - } + expectedPackageIdMap.set(oldId, expectedPackage._id) + return expectedPackage + } + }) snapshot.playlist.rundownIdsInOrder = snapshot.playlist.rundownIdsInOrder.map((id) => rundownIdMap.get(id) ?? 
id) diff --git a/packages/job-worker/src/playout/tTimers.ts b/packages/job-worker/src/playout/tTimers.ts new file mode 100644 index 0000000000..5477491d71 --- /dev/null +++ b/packages/job-worker/src/playout/tTimers.ts @@ -0,0 +1,172 @@ +import type { RundownTTimerIndex, RundownTTimerMode } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import { getCurrentTime } from '../lib/index.js' +import type { ReadonlyDeep } from 'type-fest' +import * as chrono from 'chrono-node' + +export function validateTTimerIndex(index: number): asserts index is RundownTTimerIndex { + if (isNaN(index) || index < 1 || index > 3) throw new Error(`T-timer index out of range: ${index}`) +} + +/** + * Returns an updated T-timer in the paused state (if supported) + * @param timer Timer to update + * @returns If the timer supports pausing, the timer in paused state, otherwise null + */ +export function pauseTTimer(timer: ReadonlyDeep<RundownTTimerMode> | null): ReadonlyDeep<RundownTTimerMode> | null { + if (timer?.type === 'countdown' || timer?.type === 'freeRun') { + if (timer.pauseTime) { + // Already paused + return timer + } + + return { + ...timer, + pauseTime: getCurrentTime(), + } + } else { + return null + } +} + +/** + * Returns an updated T-timer in the resumed state (if supported) + * @param timer Timer to update + * @returns If the timer supports pausing, the timer in resumed state, otherwise null + */ +export function resumeTTimer(timer: ReadonlyDeep<RundownTTimerMode> | null): ReadonlyDeep<RundownTTimerMode> | null { + if (timer?.type === 'countdown' || timer?.type === 'freeRun') { + if (!timer.pauseTime) { + // Already running + return timer + } + + const pausedOffset = timer.startTime - timer.pauseTime + const newStartTime = getCurrentTime() + pausedOffset + + return { + ...timer, + startTime: newStartTime, + pauseTime: null, + } + } else { + return null + } +} + +/** + * Returns an updated T-timer, after restarting (if supported) + * @param timer Timer to update + * @returns If the timer supports restarting, the restarted timer, otherwise null + */ +export function restartTTimer(timer: ReadonlyDeep<RundownTTimerMode> | null): ReadonlyDeep<RundownTTimerMode> | null { + if (timer?.type === 'countdown') { + return { + ...timer, + startTime: getCurrentTime(), + pauseTime: timer.pauseTime ? getCurrentTime() : null, + } + } else if (timer?.type === 'timeOfDay') { + const nextTime = calculateNextTimeOfDayTarget(timer.targetRaw) + // If we can't calculate the next time, we can't restart + if (nextTime === null || nextTime === timer.targetTime) return null + + return { + ...timer, + targetTime: nextTime, + } + } else { + return null + } +} + +/** + * Create a new countdown T-timer + * @param duration Duration in milliseconds + * @param options Options for the countdown + * @returns The created T-timer + */ +export function createCountdownTTimer( + duration: number, + options: { + stopAtZero: boolean + startPaused: boolean + } +): ReadonlyDeep<RundownTTimerMode> { + if (duration <= 0) throw new Error('Duration must be greater than zero') + + const now = getCurrentTime() + return { + type: 'countdown', + startTime: now, + pauseTime: options.startPaused ?
now : null, + duration, + stopAtZero: !!options.stopAtZero, + } +} + +/** + * Create a new time-of-day T-timer + * @param targetTime Target wall-clock time, as a parseable string or a unix timestamp + * @param options Options for the countdown + * @returns The created T-timer + */ +export function createTimeOfDayTTimer( + targetTime: string | number, + options: { + stopAtZero: boolean + } +): ReadonlyDeep<RundownTTimerMode> { + const nextTime = calculateNextTimeOfDayTarget(targetTime) + if (nextTime === null) throw new Error('Unable to parse target time for timeOfDay T-timer') + + return { + type: 'timeOfDay', + targetTime: nextTime, + targetRaw: targetTime, + stopAtZero: !!options.stopAtZero, + } +} + +/** + * Create a new free-running T-timer + * @param options Options for the free-run + * @returns The created T-timer + */ +export function createFreeRunTTimer(options: { startPaused: boolean }): ReadonlyDeep<RundownTTimerMode> { + const now = getCurrentTime() + return { + type: 'freeRun', + startTime: now, + pauseTime: options.startPaused ? now : null, + } +} + +/** + * Calculate the current time of a T-timer + * @param startTime The start time of the timer (unix timestamp) + * @param pauseTime The pause time of the timer (unix timestamp) or null if not paused + */ +export function calculateTTimerCurrentTime(startTime: number, pauseTime: number | null): number { + if (pauseTime) { + return pauseTime - startTime + } else { + return getCurrentTime() - startTime + } +} + +/** + * Calculate the next target time for a timeOfDay T-timer + * @param targetTime The target time, as a string or timestamp number + * @returns The next target timestamp in milliseconds, or null if it could not be calculated + */ +export function calculateNextTimeOfDayTarget(targetTime: string | number): number | null { + if (typeof targetTime === 'number') { + // This should be a unix timestamp + return targetTime + } + + // Verify we have a string worth parsing + if (typeof targetTime !== 'string' || !targetTime) return null + + const parsed = chrono.parseDate(targetTime, undefined, { + // Always look ahead for the next occurrence + forwardDate: true, + }) + return parsed ?
parsed.getTime() : null +} diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 960bf02f5b..44acd584a4 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -91,7 +91,7 @@ export async function updateStudioTimeline( const studioBlueprint = context.studioBlueprint if (studioBlueprint) { - const watchedPackages = await WatchedPackagesHelper.create(context, { + const watchedPackages = await WatchedPackagesHelper.create(context, null, null, { fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, }) diff --git a/packages/job-worker/src/rundownPlaylists.ts b/packages/job-worker/src/rundownPlaylists.ts index 86e637802a..eb61a94b06 100644 --- a/packages/job-worker/src/rundownPlaylists.ts +++ b/packages/job-worker/src/rundownPlaylists.ts @@ -236,6 +236,11 @@ export function produceRundownPlaylistInfoFromRundown( nextPartInfo: null, previousPartInfo: null, rundownIdsInOrder: [], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], ...clone(existingPlaylist), @@ -332,6 +337,11 @@ function defaultPlaylistForRundown( nextPartInfo: null, previousPartInfo: null, rundownIdsInOrder: [], + tTimers: [ + { index: 1, label: '', mode: null }, + { index: 2, label: '', mode: null }, + { index: 3, label: '', mode: null }, + ], ...clone(existingPlaylist), diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index 99336b67c6..e8d4908465 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -1,16 +1,24 @@ import { JobContext } from '../../jobs/index.js' import { ExpectedPackageDB, - ExpectedPackageDBFromStudioBaselineObjects, ExpectedPackageDBType, + ExpectedPackageIngestSourceStudioBaseline, + getExpectedPackageId, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { saveIntoDb } from '../../db/changes.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js' +import { sanitiseExpectedPackages } from '../../ingest/expectedPackages.js' +import { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { Complete } from '@sofie-automation/corelib/dist/lib' export class StudioBaselineHelper { readonly #context: JobContext - #pendingExpectedPackages: ExpectedPackageDBFromStudioBaselineObjects[] | undefined + #pendingExpectedPackages: + | Map<ExpectedPackageId, IngestExpectedPackage<ExpectedPackageIngestSourceStudioBaseline>> + | undefined #pendingExpectedPlayoutItems: ExpectedPlayoutItemStudio[] | undefined constructor(context: JobContext) { @@ -21,8 +29,26 @@ return !!this.#pendingExpectedPackages || !!this.#pendingExpectedPlayoutItems } - setExpectedPackages(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { - this.#pendingExpectedPackages = packages + setExpectedPackages(packages: ExpectedPackage.Any[]): void { + sanitiseExpectedPackages(packages) + + // Using a map here is a bit excessive, but it makes it easier to remove duplicates + this.#pendingExpectedPackages = new Map() + for (const expectedPackage of packages) { + const id = getExpectedPackageId(this.#context.studioId,
+
+			this.#pendingExpectedPackages.set(id, {
+				packageId: id,
+
+				package: expectedPackage,
+
+				source: {
+					fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS,
+					blueprintPackageId: expectedPackage._id,
+					listenToPackageInfoUpdates: expectedPackage.listenToPackageInfoUpdates,
+				},
+			} satisfies IngestExpectedPackage<ExpectedPackageIngestSourceStudioBaseline>)
+		}
 	}
 	setExpectedPlayoutItems(playoutItems: ExpectedPlayoutItemStudio[]): void {
 		this.#pendingExpectedPlayoutItems = playoutItems
@@ -39,14 +65,39 @@ export class StudioBaselineHelper {
 			)
 				: undefined,
 			this.#pendingExpectedPackages
-				? saveIntoDb(
+				? // We can be naive here, as we know the packages will have only one owner (the studio baseline)
+				  saveIntoDb(
 					this.#context,
 					this.#context.directCollections.ExpectedPackages,
 					{
 						studioId: this.#context.studioId,
-						fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS,
+						rundownId: null,
+						bucketId: null,
 					},
-					this.#pendingExpectedPackages
+					Array.from(this.#pendingExpectedPackages.values()).map(
+						(pkg) =>
+							({
+								_id: pkg.packageId,
+								studioId: this.#context.studioId,
+								rundownId: null,
+								bucketId: null,
+
+								created: Date.now(),
+								package: pkg.package,
+								ingestSources: [pkg.source],
+								playoutSources: {
+									// This doesn't belong to a rundown, so can't be referenced by playout
+									pieceInstanceIds: [],
+								},
+							}) satisfies Complete<ExpectedPackageDB>
+					),
+					{
+						beforeDiff: (doc, oldDoc) => {
+							// Preserve the created date
+							doc.created = oldDoc.created
+							return doc
+						},
+					}
				)
 				: undefined,
 		])
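The Map keyed by package id is what performs the deduplication here. A sketch of the idea (the id derivation is an assumption for illustration; `getExpectedPackageId`'s real logic lives in corelib):

```ts
// Hypothetical stand-in for getExpectedPackageId: a content hash is assumed
// here purely to show why identical packages collapse to one Map entry.
import { createHash } from 'node:crypto'

interface PendingPackage {
	packageId: string
	package: { _id: string; content: unknown }
}

function stablePackageId(studioId: string, pkg: PendingPackage['package']): string {
	const contentHash = createHash('sha1').update(JSON.stringify(pkg.content)).digest('hex')
	return `${studioId}_${pkg._id}_${contentHash}`
}

const pending = new Map<string, PendingPackage>()
for (const pkg of [
	{ _id: 'clipA', content: { filePath: 'x.mov' } },
	{ _id: 'clipA', content: { filePath: 'x.mov' } }, // duplicate from the blueprint
]) {
	const id = stablePackageId('studio0', pkg)
	pending.set(id, { packageId: id, package: pkg }) // second insert overwrites the first
}
console.log(pending.size) // 1: duplicates collapse before hitting the DB
```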
diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts
index fefc0e8914..33145c4e6e 100644
--- a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts
+++ b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts
@@ -1,15 +1,15 @@
-import { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
-import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice'
-import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
-import {
+import type { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import type { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice'
+import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+import type {
 	TimelineComplete,
 	TimelineCompleteGenerationVersions,
 	TimelineObjGeneric,
 } from '@sofie-automation/corelib/dist/dataModel/Timeline'
-import { BaseModel } from '../../modelBase.js'
-import { ReadonlyDeep } from 'type-fest'
-import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
-import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
+import type { BaseModel } from '../../modelBase.js'
+import type { ReadonlyDeep } from 'type-fest'
+import type { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
+import type { ExpectedPackage } from '@sofie-automation/blueprints-integration'
 
 export interface StudioPlayoutModelBaseReadonly {
 	/**
@@ -35,7 +35,7 @@ export interface StudioPlayoutModelBase extends StudioPlayoutModelBaseReadonly {
 	 * Update the ExpectedPackages for the StudioBaseline of the current Studio
 	 * @param packages ExpectedPackages to store
 	 */
-	setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void
+	setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void
 	/**
 	 * Update the ExpectedPlayoutItems for the StudioBaseline of the current Studio
 	 * @param playoutItems ExpectedPlayoutItems to store
diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts
index 50f9cb291a..55a8e97808 100644
--- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts
+++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts
@@ -15,9 +15,9 @@ import { IS_PRODUCTION } from '../../environment.js'
 import { logger } from '../../logging.js'
 import { StudioPlayoutModel } from './StudioPlayoutModel.js'
 import { DatabasePersistedModel } from '../../modelBase.js'
-import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages'
 import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem'
 import { StudioBaselineHelper } from './StudioBaselineHelper.js'
+import { ExpectedPackage } from '@sofie-automation/blueprints-integration'
 
 /**
  * This is a model used for studio operations.
@@ -82,7 +82,7 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel {
 		return this.context.studio.settings.multiGatewayNowSafeLatency
 	}
 
-	setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void {
+	setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void {
 		this.#baselineHelper.setExpectedPackages(packages)
 	}
 	setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void {
diff --git a/packages/job-worker/src/workers/context/JobContextImpl.ts b/packages/job-worker/src/workers/context/JobContextImpl.ts
index 08d6a7dc82..d47dd309c0 100644
--- a/packages/job-worker/src/workers/context/JobContextImpl.ts
+++ b/packages/job-worker/src/workers/context/JobContextImpl.ts
@@ -1,5 +1,5 @@
 import { IDirectCollections } from '../../db/index.js'
-import { JobContext, JobStudio } from '../../jobs/index.js'
+import { JobContext, JobStudio, QueueJobOptions } from '../../jobs/index.js'
 import { WorkerDataCache } from '../caches.js'
 import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
 import { getIngestQueueName, IngestJobFunc } from '@sofie-automation/corelib/dist/worker/ingest'
@@ -137,13 +137,17 @@ export class JobContextImpl extends StudioCacheContextImpl implements JobContext
 	}
 
 	async queueIngestJob<T extends keyof IngestJobFunc>(name: T, data: Parameters<IngestJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getIngestQueueName(this.studioId), name, data)
+		await this.queueJob(getIngestQueueName(this.studioId), name, data, undefined)
 	}
-	async queueStudioJob<T extends keyof StudioJobFunc>(name: T, data: Parameters<StudioJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getStudioQueueName(this.studioId), name, data)
+	async queueStudioJob<T extends keyof StudioJobFunc>(
+		name: T,
+		data: Parameters<StudioJobFunc[T]>[0],
+		options?: QueueJobOptions
+	): Promise<void> {
+		await this.queueJob(getStudioQueueName(this.studioId), name, data, options)
 	}
 	async queueEventJob<T extends keyof EventsJobFunc>(name: T, data: Parameters<EventsJobFunc[T]>[0]): Promise<void> {
-		await this.queueJob(getEventsQueueName(this.studioId), name, data)
+		await this.queueJob(getEventsQueueName(this.studioId), name, data, undefined)
 	}
 
 	hackPublishTimelineToFastTrack(newTimeline: TimelineComplete): void {
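The options-threading here is deliberate: `queueJob` takes a required-but-possibly-undefined options argument, so no call site can silently ignore it. A sketch of the pattern (QueueJobOptions' real shape lives in `jobs/index.js`; `lowPriority` is an assumed field for illustration):

```ts
// Assumed shape; the real interface is defined in the job-worker's jobs module.
interface QueueJobOptions {
	lowPriority?: boolean
}

type QueueJobFunc = (
	queueName: string,
	jobName: string,
	jobData: unknown,
	options: QueueJobOptions | undefined
) => Promise<void>

const queueJob: QueueJobFunc = async (queueName, jobName, jobData, options) => {
	// A real implementation would enqueue onto the named worker queue.
	console.log(queueName, jobName, jobData, options ?? '(defaults)')
}

async function demo(): Promise<void> {
	// Because `options` is required (not optional), every caller must state
	// its intent, even if that intent is just `undefined`:
	await queueJob('studio:studio0', 'cleanupOrphaned', {}, { lowPriority: true })
	await queueJob('events:studio0', 'notifyEvent', {}, undefined)
}
void demo()
```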
diff --git a/packages/job-worker/src/workers/context/util.ts b/packages/job-worker/src/workers/context/util.ts
index 38ac084220..3320453677 100644
--- a/packages/job-worker/src/workers/context/util.ts
+++ b/packages/job-worker/src/workers/context/util.ts
@@ -1 +1,8 @@
-export type QueueJobFunc = (queueName: string, jobName: string, jobData: unknown) => Promise<void>
+import type { QueueJobOptions } from '../../jobs/index.js'
+
+export type QueueJobFunc = (
+	queueName: string,
+	jobName: string,
+	jobData: unknown,
+	options: QueueJobOptions | undefined
+) => Promise<void>
diff --git a/packages/job-worker/src/workers/ingest/jobs.ts b/packages/job-worker/src/workers/ingest/jobs.ts
index b8cdbb0779..2bc85736ca 100644
--- a/packages/job-worker/src/workers/ingest/jobs.ts
+++ b/packages/job-worker/src/workers/ingest/jobs.ts
@@ -29,7 +29,7 @@ import {
 	handleUpdatedSegment,
 	handleUpdatedSegmentRanks,
 } from '../../ingest/ingestSegmentJobs.js'
-import { handleExpectedPackagesRegenerate, handleUpdatedPackageInfoForRundown } from '../../ingest/packageInfo.js'
+import { handleUpdatedPackageInfoForRundown } from '../../ingest/packageInfo.js'
 import {
 	handleBucketActionModify,
 	handleBucketActionRegenerateExpectedPackages,
@@ -81,7 +81,6 @@ export const ingestJobHandlers: IngestJobHandlers = {
 	[IngestJobs.MosMoveStory]: wrapMosIngestJob(handleMosMoveStories),
 	[IngestJobs.MosSwapStory]: wrapMosIngestJob(handleMosSwapStories),
 
-	[IngestJobs.ExpectedPackagesRegenerate]: handleExpectedPackagesRegenerate,
 	[IngestJobs.PackageInfosUpdatedRundown]: handleUpdatedPackageInfoForRundown,
 
 	[IngestJobs.UserRemoveRundown]: handleUserRemoveRundown,
diff --git a/packages/job-worker/src/workers/studio/jobs.ts b/packages/job-worker/src/workers/studio/jobs.ts
index 4e08fd7edb..be5d81787d 100644
--- a/packages/job-worker/src/workers/studio/jobs.ts
+++ b/packages/job-worker/src/workers/studio/jobs.ts
@@ -48,6 +48,7 @@ import { handleClearQuickLoopMarkers, handleSetQuickLoopMarker } from '../../pla
 import { handleActivateAdlibTesting } from '../../playout/adlibTesting.js'
 import { handleExecuteBucketAdLibOrAction } from '../../playout/bucketAdlibJobs.js'
 import { handleSwitchRouteSet } from '../../studio/routeSet.js'
+import { handleCleanupOrphanedExpectedPackageReferences } from '../../playout/expectedPackages.js'
 
 type ExecutableFunction = (
 	context: JobContext,
@@ -110,4 +111,6 @@ export const studioJobHandlers: StudioJobHandlers = {
 	[StudioJobs.ClearQuickLoopMarkers]: handleClearQuickLoopMarkers,
 
 	[StudioJobs.SwitchRouteSet]: handleSwitchRouteSet,
+
+	[StudioJobs.CleanupOrphanedExpectedPackageReferences]: handleCleanupOrphanedExpectedPackageReferences,
 }
diff --git a/packages/live-status-gateway/src/topics/__tests__/utils.ts b/packages/live-status-gateway/src/topics/__tests__/utils.ts
index 576b1cb743..23b70507c1 100644
--- a/packages/live-status-gateway/src/topics/__tests__/utils.ts
+++ b/packages/live-status-gateway/src/topics/__tests__/utils.ts
@@ -34,6 +34,7 @@ export function makeTestPlaylist(id?: string): DBRundownPlaylist {
 		studioId: protectString('STUDIO_1'),
 		timing: { type: PlaylistTimingType.None },
 		publicData: { a: 'b' },
+		tTimers: [] as any,
 	}
 }
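The handler registries touched above follow a pattern worth spelling out: a `Record` keyed by the job enum means the compiler enforces that every job name has exactly one handler. A simplified sketch (types reduced for illustration; the real maps are typed against each job's payload):

```ts
// Simplified shapes; the real StudioJobHandlers type maps each job to its
// concrete payload and result types.
enum StudioJobs {
	SwitchRouteSet = 'switchRouteSet',
	CleanupOrphanedExpectedPackageReferences = 'cleanupOrphanedExpectedPackageReferences',
}

type Handler = (data: unknown) => Promise<void>

// Omitting any enum member from this Record is a compile error, so a new job
// cannot be registered without wiring up its handler.
const studioJobHandlers: Record<StudioJobs, Handler> = {
	[StudioJobs.SwitchRouteSet]: async () => console.log('switching route set'),
	[StudioJobs.CleanupOrphanedExpectedPackageReferences]: async () => console.log('cleaning up'),
}

void studioJobHandlers[StudioJobs.CleanupOrphanedExpectedPackageReferences]({})
```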
diff --git a/packages/meteor-lib/src/collections/ExpectedPackages.ts b/packages/meteor-lib/src/collections/ExpectedPackages.ts
index ca7d656a5c..5815971453 100644
--- a/packages/meteor-lib/src/collections/ExpectedPackages.ts
+++ b/packages/meteor-lib/src/collections/ExpectedPackages.ts
@@ -6,6 +6,7 @@ import {
 	htmlTemplateGetSteps,
 	htmlTemplateGetFileNamesFromSteps,
 } from '@sofie-automation/shared-lib/dist/package-manager/helpers'
+import { ReadonlyDeep } from 'type-fest'
 
 export function getPreviewPackageSettings(
 	expectedPackage: ExpectedPackage.Any
@@ -55,7 +56,7 @@ export function getThumbnailPackageSettings(
 	}
 }
 export function getSideEffect(
-	expectedPackage: ExpectedPackage.Base,
+	expectedPackage: ReadonlyDeep<ExpectedPackage.Base>,
 	studio: Pick<DBStudio, 'previewContainerIds' | 'thumbnailContainerIds'>
 ): ExpectedPackage.Base['sideEffect'] {
 	return deepExtend(
diff --git a/packages/shared-lib/src/package-manager/package.ts b/packages/shared-lib/src/package-manager/package.ts
index 052c149208..0b70f6460b 100644
--- a/packages/shared-lib/src/package-manager/package.ts
+++ b/packages/shared-lib/src/package-manager/package.ts
@@ -40,7 +40,7 @@ export namespace ExpectedPackage {
 	/** Generic (used in extends) */
 	export interface Base {
-		/** Unique id of the expectedPackage */
+		/** Blueprint defined unique id of the package within the rundown/bucket */
 		_id: ExpectedPackageId
 		/** Reference to which timeline-layer(s) the Package is going to be used in.
 		 * (Used to route the package to the right playout-device (targets))
diff --git a/packages/shared-lib/src/package-manager/publications.ts b/packages/shared-lib/src/package-manager/publications.ts
index bd4f0c1d1c..99a097af6b 100644
--- a/packages/shared-lib/src/package-manager/publications.ts
+++ b/packages/shared-lib/src/package-manager/publications.ts
@@ -1,6 +1,7 @@
 import { ExpectedPackage, PackageContainer, PackageContainerOnPackage } from './package.js'
-import { PeripheralDeviceId, PieceInstanceId, RundownId, RundownPlaylistId } from '../core/model/Ids.js'
+import { ExpectedPackageId, PeripheralDeviceId, RundownId, RundownPlaylistId } from '../core/model/Ids.js'
 import { ProtectedString } from '../lib/protectedString.js'
+import { ReadonlyDeep } from 'type-fest'
 
 export interface PackageManagerPlayoutContext {
 	_id: PeripheralDeviceId
@@ -27,18 +28,18 @@ export interface PackageManagerPackageContainers {
 
 export type PackageManagerExpectedPackageId = ProtectedString<'PackageManagerExpectedPackage'>
 
-export type PackageManagerExpectedPackageBase = ExpectedPackage.Base & { rundownId?: RundownId }
+export type PackageManagerExpectedPackageBase = ReadonlyDeep<Omit<ExpectedPackage.Base, '_id'>> & {
+	_id: ExpectedPackageId
+}
 
 export interface PackageManagerExpectedPackage {
 	/** Unique id of the expectedPackage */
 	_id: PackageManagerExpectedPackageId
 
 	expectedPackage: PackageManagerExpectedPackageBase
-	/** Lower should be done first */
-	priority: number
+	/** Lower should be done first. If not set, the priority is considered as low priority */
+	priority: number | null
 
 	sources: PackageContainerOnPackage[]
 	targets: PackageContainerOnPackage[]
 
 	playoutDeviceId: PeripheralDeviceId
-
-	pieceInstanceId: PieceInstanceId | null
 }
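The doc comment on the new nullable `priority` implies a specific consumer-side convention: `null` sorts after every numeric priority. A sketch of what such a consumer might look like (assumed logic; the PR does not show one):

```ts
interface WorkItem {
	name: string
	priority: number | null
}

function byPriority(a: WorkItem, b: WorkItem): number {
	// Per the comment above: unset priority is treated as "low priority",
	// so null is pushed past any real number.
	const pa = a.priority ?? Number.POSITIVE_INFINITY
	const pb = b.priority ?? Number.POSITIVE_INFINITY
	return pa - pb
}

const items: WorkItem[] = [
	{ name: 'thumbnail', priority: null },
	{ name: 'copy', priority: 0 },
	{ name: 'preview', priority: 10 },
]
console.log(items.sort(byPriority).map((i) => i.name)) // [ 'copy', 'preview', 'thumbnail' ]
```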
diff --git a/packages/webui/src/__mocks__/defaultCollectionObjects.ts b/packages/webui/src/__mocks__/defaultCollectionObjects.ts
index 7434f499fb..161bbec448 100644
--- a/packages/webui/src/__mocks__/defaultCollectionObjects.ts
+++ b/packages/webui/src/__mocks__/defaultCollectionObjects.ts
@@ -48,6 +48,11 @@ export function defaultRundownPlaylist(_id: RundownPlaylistId, studioId: StudioId
 			type: 'none' as any,
 		},
 		rundownIdsInOrder: [],
+		tTimers: [
+			{ index: 1, label: '', mode: null },
+			{ index: 2, label: '', mode: null },
+			{ index: 3, label: '', mode: null },
+		],
 	}
 }
 export function defaultRundown(
diff --git a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts
index 8e402449d9..f57f33d4ed 100644
--- a/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts
+++ b/packages/webui/src/client/lib/__tests__/rundownTiming.test.ts
@@ -28,6 +28,12 @@ function makeMockPlaylist(): DBRundownPlaylist {
 			type: PlaylistTimingType.None,
 		},
 		rundownIdsInOrder: [],
+
+		tTimers: [
+			{ index: 1, label: '', mode: null },
+			{ index: 2, label: '', mode: null },
+			{ index: 3, label: '', mode: null },
+		],
 	})
 }
diff --git a/packages/webui/src/client/ui/RundownView/RundownHeader/RundownHeader.tsx b/packages/webui/src/client/ui/RundownView/RundownHeader/RundownHeader.tsx
index ffbf52f145..a57f7b3ff8 100644
--- a/packages/webui/src/client/ui/RundownView/RundownHeader/RundownHeader.tsx
+++ b/packages/webui/src/client/ui/RundownView/RundownHeader/RundownHeader.tsx
@@ -106,6 +106,10 @@ export function RundownHeader({
 
 	const rundownTimesInfo = checkRundownTimes(playlist.timing)
 
+	useEffect(() => {
+		console.debug(`Rundown T-Timers Info: `, playlist.tTimers)
+	}, [playlist.tTimers])
+
 	return (
 		<>
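The three-slot default is spelled out identically in several fixtures above (and in the job-worker's playlist defaults). A small factory would keep them in sync; this is a hypothetical helper, not part of the PR:

```ts
interface DefaultTTimer {
	index: number
	label: string
	mode: null // stands in for the PR's timer-mode union, which isn't shown here
}

// Hypothetical convenience: generates the 1-indexed empty timers used as defaults.
function defaultTTimers(count = 3): DefaultTTimer[] {
	return Array.from({ length: count }, (_, i) => ({ index: i + 1, label: '', mode: null }))
}

console.log(defaultTTimers())
// [ { index: 1, label: '', mode: null }, { index: 2, ... }, { index: 3, ... } ]
```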
diff --git a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx
index 82d0408991..738359eac7 100644
--- a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx
+++ b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx
@@ -23,38 +23,55 @@ export const DevicePackageManagerSettings: React.FC
 	const device = useTracker(() => PeripheralDevices.findOne(deviceId), [deviceId], undefined)
 
-	const reloadingNow = useRef(false)
+	const reloadingNow = useRef<PeripheralDeviceId | null>(null)
 	const [status, setStatus] = useState<Status | undefined>(undefined)
 
-	const reloadStatus = useCallback((silent = false) => {
-		if (reloadingNow.current) return // if there is a method currently being executed, skip
+	const reloadStatus = useCallback(
+		(silent = false) => {
+			if (reloadingNow.current === deviceId) return // if there is a method currently being executed, skip
 
-		reloadingNow.current = true
+			reloadingNow.current = deviceId
 
-		MeteorCall.client
-			.callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus')
-			.then((result: Status) => setStatus(result))
-			.catch((error) => {
-				if (silent) {
-					logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error)
-					return
-				}
+			MeteorCall.client
+				.callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus')
+				.then((result: Status) => {
+					if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort
 
-				doModalDialog({
-					message: t('There was an error: {{error}}', { error: error.toString() }),
-					title: t('Error'),
-					warning: true,
-					onAccept: () => {
-						// Do nothing
-					},
+					setStatus(result)
 				})
-			})
-			.finally(() => {
-				reloadingNow.current = false
-			})
-	}, [])
+				.catch((error) => {
+					if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort
+
+					if (silent) {
+						logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error)
+						return
+					}
+
+					doModalDialog({
+						message: t('There was an error: {{error}}', { error: error.toString() }),
+						title: t('Error'),
+						warning: true,
+						onAccept: () => {
+							// Do nothing
+						},
+					})
+				})
+				.finally(() => {
+					if (reloadingNow.current === deviceId) {
+						reloadingNow.current = null
+					}
+				})
+		},
+		[deviceId]
+	)
 
 	useEffect(() => {
+		// Clear cached status when deviceId changes
+		setStatus(undefined)
+
+		// Trigger a load now
+		reloadStatus(true)
+
 		const reloadInterval = Meteor.setInterval(() => {
 			if (deviceId) {
 				reloadStatus(true)
@@ -64,7 +81,7 @@ export const DevicePackageManagerSettings: React.FC
 		return () => {
 			Meteor.clearInterval(reloadInterval)
 		}
-	}, [])
+	}, [deviceId, reloadStatus])
 
 	function killApp(e: string, appId: string) {
 		MeteorCall.client
diff --git a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx
index 6675919fef..8b5180dac6 100644
--- a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx
+++ b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx
@@ -25,7 +25,7 @@ export const PackageStatus: React.FC<{
 	const { t } = useTranslation()
 
 	const getPackageName = useCallback((): string => {
-		const p2: ExpectedPackage.Any = props.package as any
+		const p2 = props.package.package as ExpectedPackage.Any
 		if (p2.type === ExpectedPackage.PackageType.MEDIA_FILE) {
 			return p2.content.filePath || unprotectString(props.package._id)
 		} else if (p2.type === ExpectedPackage.PackageType.QUANTEL_CLIP) {
@@ -87,7 +87,7 @@ export const PackageStatus: React.FC<{
 			return 0
 		})
-	}, props.statuses)
+	}, [props.statuses])
 
 	let offlineReasonMessage: string | undefined = undefined
 	let connected = true
diff --git a/packages/webui/src/client/ui/Status/package-status/index.tsx b/packages/webui/src/client/ui/Status/package-status/index.tsx
index 5c9baa0d6f..4fea26d793 100644
--- a/packages/webui/src/client/ui/Status/package-status/index.tsx
+++ b/packages/webui/src/client/ui/Status/package-status/index.tsx
@@ -68,11 +68,6 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta
 		)
 	}
 	function renderExpectedPackageStatuses() {
-		const packageRef: { [packageId: string]: ExpectedPackageDB } = {}
-		for (const expPackage of expectedPackages) {
-			packageRef[unprotectString(expPackage._id)] = expPackage
-		}
-
 		const packagesWithWorkStatuses: {
 			[packageId: string]: {
 				package: ExpectedPackageDB | undefined
@@ -80,20 +75,33 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta
 				device: PeripheralDevice | undefined
 			}
 		} = {}
+
+		for (const expPackage of expectedPackages) {
+			packagesWithWorkStatuses[unprotectString(expPackage._id)] = {
+				package: expPackage,
+				statuses: [],
+				device: undefined,
+			}
+		}
+
 		for (const work of expectedPackageWorkStatuses) {
-			const device = peripheralDevicesMap.get(work.deviceId)
 			// todo: make this better:
-			const key = unprotectString(work.fromPackages[0]?.id) || 'unknown_work_' + work._id
-			// const referencedPackage = packageRef[packageId]
-			let packageWithWorkStatus = 
packagesWithWorkStatuses[key] - if (!packageWithWorkStatus) { - packagesWithWorkStatuses[key] = packageWithWorkStatus = { - package: packageRef[key] || undefined, - statuses: [], - device, + let fromPackageIds = work.fromPackages.map((p) => unprotectString(p.id)) + if (fromPackageIds.length === 0) fromPackageIds = ['unknown_work_' + work._id] + + for (const key of fromPackageIds) { + // const referencedPackage = packageRef[packageId] + let packageWithWorkStatus = packagesWithWorkStatuses[key] + if (!packageWithWorkStatus) { + packagesWithWorkStatuses[key] = packageWithWorkStatus = { + package: undefined, + statuses: [], + device: undefined, + } } + packageWithWorkStatus.statuses.push(work) + packageWithWorkStatus.device = peripheralDevicesMap.get(work.deviceId) } - packageWithWorkStatus.statuses.push(work) } for (const id of Object.keys(packagesWithWorkStatuses)) { @@ -150,6 +158,7 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta function renderPackageContainerStatuses() { return packageContainerStatuses.map((packageContainerStatus) => { const device = peripheralDevicesMap.get(packageContainerStatus.deviceId) + console.log(device, packageContainerStatus.deviceId) return (
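The regrouping in index.tsx above changes the fan-out: a work status is now attached to every package it references, instead of only the first. A simplified sketch of the new behaviour (shapes reduced for illustration, not the real types):

```ts
interface WorkStatus {
	_id: string
	fromPackages: { id: string }[]
}

function groupByPackage(statuses: WorkStatus[]): Map<string, WorkStatus[]> {
	const grouped = new Map<string, WorkStatus[]>()
	for (const work of statuses) {
		// Work with no package references is kept visible under a synthetic key.
		const keys = work.fromPackages.length
			? work.fromPackages.map((p) => p.id)
			: ['unknown_work_' + work._id]
		for (const key of keys) {
			const list = grouped.get(key) ?? []
			list.push(work)
			grouped.set(key, list)
		}
	}
	return grouped
}

console.log(groupByPackage([{ _id: 'w1', fromPackages: [{ id: 'a' }, { id: 'b' }] }]))
// Map { 'a' => [ w1 ], 'b' => [ w1 ] }: w1 now appears under both packages
```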