From 23db57bf6f91de04885f7e645ad65409367a2c12 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sat, 29 Nov 2025 17:58:53 -0500 Subject: [PATCH 01/25] feat: support sparse manifest.json in state files Enable loading state files with minimal manifest.json containing just dataSources and optional tools. Made most manifest fields optional so Python tooling can generate session files without specifying layouts, views, or other defaults. Views now auto-assign the first loaded dataset when viewByID is not provided. --- src/io/import/processors/restoreStateFile.ts | 53 +++++--- src/io/state-file/schema.ts | 64 ++++------ src/store/datasets-layers.ts | 4 +- src/store/datasets.ts | 14 ++- src/store/segmentGroups.ts | 4 + src/store/tools/crop.ts | 4 +- src/store/tools/crosshairs.ts | 9 +- src/store/tools/index.ts | 7 +- src/store/tools/paint.ts | 15 ++- src/store/tools/polygons.ts | 3 +- src/store/tools/rectangles.ts | 3 +- src/store/tools/rulers.ts | 3 +- src/store/view-configs.ts | 2 + src/store/view-configs/common.ts | 8 +- src/store/views.ts | 41 ++++--- tests/specs/sparse-manifest.e2e.ts | 122 +++++++++++++++++++ 16 files changed, 260 insertions(+), 96 deletions(-) create mode 100644 tests/specs/sparse-manifest.e2e.ts diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index 2d19359bd..8a58ee928 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -26,7 +26,6 @@ import extractArchiveTarget from '@/src/io/import/processors/extractArchiveTarge import { ChainHandler, evaluateChain, Skip } from '@/src/utils/evaluateChain'; import openUriStream from '@/src/io/import/processors/openUriStream'; import updateUriType from '@/src/io/import/processors/updateUriType'; -import handleDicomStream from '@/src/io/import/processors/handleDicomStream'; import downloadStream from '@/src/io/import/processors/downloadStream'; import { FileEntry } from '@/src/io/types'; import { useViewStore } from '@/src/store/views'; @@ -327,8 +326,8 @@ const resolvingHandlers: ResolvingImportHandler[] = [ updateFileMimeType, updateUriType, - // stream handling - handleDicomStream, + // stream handling - only download, not DICOM processing + // DICOM processing happens in the main import pipeline downloadStream, extractArchiveTarget, @@ -378,7 +377,7 @@ async function rebuildDataSources( return { type: 'uri', uri: serialized.uri, - name: serialized.name, + name: serialized.name ?? serialized.uri, mime: serialized.mime, }; case 'collection': { @@ -445,7 +444,14 @@ async function restoreDatasets( datasetFiles: FileEntry[], context?: ImportContext ) { - const { datasets, dataSources, datasetFilePath } = manifest; + const { dataSources } = manifest; + const datasets = + manifest.datasets ?? + dataSources + .filter((ds) => ds.type === 'uri') + .map((ds) => ({ id: String(ds.id), dataSourceId: ds.id })); + const datasetFilePath = manifest.datasetFilePath ?? {}; + const dataSourceIDToStateID = datasets.reduce>( (acc, ds) => Object.assign(acc, { @@ -480,13 +486,16 @@ async function restoreDatasets( await Promise.all( [...leaves].map(async (leafId) => { const dataSource = dataSourceCache[leafId]; - const importResult = + const importResults = (await context?.importDataSources?.([dataSource])) ?? 
[]; - const [result] = importResult; - if (result?.type !== 'data' || importResult.length !== 1) - throw new Error('Expected a single dataset'); - stateIDToStoreID[dataSourceIDToStateID[leafId]] = result.dataID; + const dataResults = importResults.filter( + (r): r is typeof r & { type: 'data' } => r.type === 'data' + ); + + if (dataResults.length > 0) { + stateIDToStoreID[dataSourceIDToStateID[leafId]] = dataResults[0].dataID; + } }) ); @@ -518,9 +527,10 @@ const restoreStateFile: ImportHandler = async (dataSource, context) => { ); } - // We restore the view first, so that the appropriate watchers are triggered - // in the views as the data is loaded - useViewStore().setLayout(manifest.layout); + // Set layout if provided, otherwise use existing default layout + if (manifest.layout) { + useViewStore().setLayout(manifest.layout); + } const stateIDToStoreID = await restoreDatasets( manifest, @@ -528,10 +538,19 @@ const restoreStateFile: ImportHandler = async (dataSource, context) => { context ); - // Restore the views - useViewStore().deserialize(manifest, stateIDToStoreID); + // Restore the views (handles missing viewByID gracefully) + const viewStore = useViewStore(); + viewStore.deserialize(manifest, stateIDToStoreID); + + // When viewByID is not in manifest, assign first dataset to all views + if (!manifest.viewByID) { + const firstDataID = Object.values(stateIDToStoreID)[0]; + if (firstDataID) { + viewStore.setDataForAllViews(firstDataID); + } + } - // Restore view configs + // Restore view configs (handles missing configs gracefully) useViewConfigStore().deserializeAll(manifest, stateIDToStoreID); // Restore the labelmaps @@ -541,7 +560,7 @@ const restoreStateFile: ImportHandler = async (dataSource, context) => { stateIDToStoreID ); - // Restore the tools + // Restore the tools (each tool handles missing data gracefully) useToolStore().deserialize(manifest, segmentGroupIDMap, stateIDToStoreID); useLayersStore().deserialize(manifest, stateIDToStoreID); diff --git a/src/io/state-file/schema.ts b/src/io/state-file/schema.ts index c3081bae2..5ce0c9d29 100644 --- a/src/io/state-file/schema.ts +++ b/src/io/state-file/schema.ts @@ -7,7 +7,7 @@ import type { PiecewiseNode, } from '@kitware/vtk.js/Proxy/Core/PiecewiseFunctionProxy'; -import type { AnnotationTool, ToolID } from '@/src/types/annotation-tool'; +import type { ToolID } from '@/src/types/annotation-tool'; import { Tools as ToolsEnum } from '@/src/store/tools/types'; import type { Ruler } from '@/src/types/ruler'; import type { Rectangle } from '@/src/types/rectangle'; @@ -54,7 +54,7 @@ const UriSource = z.object({ id: z.number(), type: z.literal('uri'), uri: z.string(), - name: z.string(), + name: z.string().optional(), mime: z.string().optional(), parent: z.number().optional(), }); @@ -329,20 +329,18 @@ const FrameOfReference = z.object({ planeNormal: Vector3, }) satisfies z.ZodType; -type SerializedAnnotationTool = Omit; - const annotationTool = z.object({ imageID: z.string(), frameOfReference: FrameOfReference, slice: z.number(), - id: z.string() as unknown as z.ZodType, - name: z.string(), - color: z.string(), + id: z.string().optional() as unknown as z.ZodType, + name: z.string().optional(), + color: z.string().optional(), strokeWidth: z.number().optional(), label: z.string().optional(), labelName: z.string().optional(), metadata: z.record(z.string(), z.string()).optional(), -}) satisfies z.ZodType; +}); const makeToolEntry = (tool: z.ZodObject) => z.object({ @@ -350,35 +348,27 @@ const makeToolEntry = (tool: z.ZodObject) => 
labels: z.record(z.string(), tool.partial()), }); -type SerializedRuler = Omit; - const Ruler = annotationTool.extend({ firstPoint: Vector3, secondPoint: Vector3, -}) satisfies z.ZodType; +}); const Rulers = makeToolEntry(Ruler); -type SerializedRectangle = Omit & - Partial>; - const Rectangle = Ruler.extend({ fillColor: z.string().optional(), -}) satisfies z.ZodType; +}); const Rectangles = makeToolEntry(Rectangle); -type SerializedPolygon = Omit; - const Polygon = annotationTool.extend({ - id: z.string() as unknown as z.ZodType, points: z.array(Vector3), -}) satisfies z.ZodType; +}); const Polygons = makeToolEntry(Polygon); const Crosshairs = z.object({ - position: Vector3, + position: Vector3.optional(), }); export type Crosshairs = z.infer; @@ -386,11 +376,11 @@ export type Crosshairs = z.infer; const ToolsEnumNative = z.nativeEnum(ToolsEnum); const Paint = z.object({ - activeSegmentGroupID: z.string().nullable(), + activeSegmentGroupID: z.string().nullable().optional(), activeSegment: z.number().nullish(), - brushSize: z.number(), - crossPlaneSync: z.boolean().default(false), - labelmapOpacity: z.number().optional(), // labelmapOpacity now ignored. Opacity per segment group via layerColoring store. + brushSize: z.number().optional(), + crossPlaneSync: z.boolean().optional(), + labelmapOpacity: z.number().optional(), }); const LPSCroppingPlanes = z.object({ @@ -405,10 +395,10 @@ const Tools = z.object({ rulers: Rulers.optional(), rectangles: Rectangles.optional(), polygons: Polygons.optional(), - crosshairs: Crosshairs, - paint: Paint, - crop: Cropping, - current: ToolsEnumNative, + crosshairs: Crosshairs.optional(), + paint: Paint.optional(), + crop: Cropping.optional(), + current: ToolsEnumNative.optional(), }); export type Tools = z.infer; @@ -424,18 +414,18 @@ export type ParentToLayers = z.infer; export const ManifestSchema = z.object({ version: z.string(), - datasets: Dataset.array(), + datasets: Dataset.array().optional(), dataSources: DataSource.array(), - datasetFilePath: z.record(z.string(), z.string()), - labelMaps: LabelMap.array(), - tools: Tools, + datasetFilePath: z.record(z.string(), z.string()).optional(), + labelMaps: LabelMap.array().optional(), + tools: Tools.optional(), activeView: z.string().optional().nullable(), - isActiveViewMaximized: z.boolean(), - viewByID: z.record(z.string(), View), + isActiveViewMaximized: z.boolean().optional(), + viewByID: z.record(z.string(), View).optional(), primarySelection: z.string().optional(), - layout: Layout, - layoutSlots: z.array(z.string()), - parentToLayers: ParentToLayers, + layout: Layout.optional(), + layoutSlots: z.array(z.string()).optional(), + parentToLayers: ParentToLayers.optional(), }); export type Manifest = z.infer; diff --git a/src/store/datasets-layers.ts b/src/store/datasets-layers.ts index c444f251d..5e1a35561 100644 --- a/src/store/datasets-layers.ts +++ b/src/store/datasets-layers.ts @@ -124,11 +124,13 @@ export const useLayersStore = defineStore('layer', () => { } function deserialize(manifest: Manifest, dataIDMap: Record) { + const parentToLayersSerialized = manifest.parentToLayers; + if (!parentToLayersSerialized) return; + const remapSelection = (selection: DataSelection) => { return dataIDMap[selection]; }; - const { parentToLayers: parentToLayersSerialized } = manifest; parentToLayersSerialized.forEach( ({ selectionKey, sourceSelectionKeys }) => { const parent = remapSelection(selectionKey); diff --git a/src/store/datasets.ts b/src/store/datasets.ts index 95c39e247..05a257f76 100644 --- 
a/src/store/datasets.ts +++ b/src/store/datasets.ts @@ -157,12 +157,14 @@ export const useDatasetStore = defineStore('dataset', () => { manifest.dataSources = serializedDependencies; // add any locally loaded files - manifest.datasetFilePath = {}; - Object.entries(files).forEach(([fileId, file]) => { - const filePath = `data/${fileId}/${file.name}`; - zip.file(filePath, file); - manifest.datasetFilePath[fileId] = filePath; - }); + if (Object.keys(files).length > 0) { + manifest.datasetFilePath = {}; + Object.entries(files).forEach(([fileId, file]) => { + const filePath = `data/${fileId}/${file.name}`; + zip.file(filePath, file); + manifest.datasetFilePath![fileId] = filePath; + }); + } } const remove = (id: string | null) => { diff --git a/src/store/segmentGroups.ts b/src/store/segmentGroups.ts index e8c1959fc..0cd2da4fe 100644 --- a/src/store/segmentGroups.ts +++ b/src/store/segmentGroups.ts @@ -501,6 +501,10 @@ export const useSegmentGroupStore = defineStore('segmentGroup', () => { const segmentGroupIDMap: Record = {}; + if (!labelMaps || labelMaps.length === 0) { + return segmentGroupIDMap; + } + // First restore the data, then restore the store. // This preserves ordering from orderByParent. diff --git a/src/store/tools/crop.ts b/src/store/tools/crop.ts index 771f9b986..f85b009e3 100644 --- a/src/store/tools/crop.ts +++ b/src/store/tools/crop.ts @@ -162,11 +162,13 @@ export const useCropStore = defineStore('crop', () => { function serialize(stateFile: StateFile) { const { tools } = stateFile.manifest; + if (!tools) return; tools.crop = state.croppingByImageID; } function deserialize(manifest: Manifest, dataIDMap: Record) { - const cropping = manifest.tools.crop; + const cropping = manifest.tools?.crop; + if (!cropping) return; Object.entries(cropping).forEach(([imageID, planes]) => { const newImageID = dataIDMap[imageID]; diff --git a/src/store/tools/crosshairs.ts b/src/store/tools/crosshairs.ts index 3e495aae2..6c6c861bc 100644 --- a/src/store/tools/crosshairs.ts +++ b/src/store/tools/crosshairs.ts @@ -111,13 +111,16 @@ export const useCrosshairsToolStore = defineStore('crosshairs', () => { } function serialize(state: StateFile) { - const { crosshairs } = state.manifest.tools; + const crosshairs = state.manifest.tools?.crosshairs; + if (!crosshairs) return; crosshairs.position = position.value; } function deserialize(manifest: Manifest) { - const { crosshairs } = manifest.tools; - position.value = crosshairs.position; + const crosshairsPosition = manifest.tools?.crosshairs?.position; + if (crosshairsPosition) { + position.value = crosshairsPosition; + } } return { diff --git a/src/store/tools/index.ts b/src/store/tools/index.ts index a60b2f073..0a95073b0 100644 --- a/src/store/tools/index.ts +++ b/src/store/tools/index.ts @@ -89,6 +89,7 @@ export const useToolStore = defineStore('tool', () => { function serialize(state: StateFile) { const { tools } = state.manifest; + if (!tools) return; Object.values(ToolStoreMap) .map((useStore) => useStore?.()) @@ -105,8 +106,6 @@ export const useToolStore = defineStore('tool', () => { segmentGroupIDMap: Record, dataIDMap: Record ) { - const { tools } = manifest; - usePaintToolStore().deserialize(manifest, segmentGroupIDMap); Object.values(ToolStoreMap) @@ -118,7 +117,9 @@ export const useToolStore = defineStore('tool', () => { store.deserialize?.(manifest, dataIDMap); }); - currentTool.value = tools.current; + if (manifest.tools?.current) { + currentTool.value = manifest.tools.current; + } } return { diff --git a/src/store/tools/paint.ts 
b/src/store/tools/paint.ts index 1a6dae776..c8258333c 100644 --- a/src/store/tools/paint.ts +++ b/src/store/tools/paint.ts @@ -370,7 +370,8 @@ export const usePaintToolStore = defineStore('paint', () => { } function serialize(state: StateFile) { - const { paint } = state.manifest.tools; + const paint = state.manifest.tools?.paint; + if (!paint) return; paint.activeSegmentGroupID = activeSegmentGroupID.value ?? null; paint.brushSize = brushSize.value; @@ -383,11 +384,15 @@ export const usePaintToolStore = defineStore('paint', () => { manifest: Manifest, segmentGroupIDMap: Record ) { - const { paint } = manifest.tools; - setBrushSize.call(this, paint.brushSize); - isActive.value = manifest.tools.current === Tools.Paint; + const paint = manifest.tools?.paint; + if (!paint) return; - if (paint.activeSegmentGroupID !== null) { + if (paint.brushSize !== undefined) { + setBrushSize.call(this, paint.brushSize); + } + isActive.value = manifest.tools?.current === Tools.Paint; + + if (paint.activeSegmentGroupID) { activeSegmentGroupID.value = segmentGroupIDMap[paint.activeSegmentGroupID]; setActiveSegmentGroup(activeSegmentGroupID.value); diff --git a/src/store/tools/polygons.ts b/src/store/tools/polygons.ts index 66300a4df..813d17e9c 100644 --- a/src/store/tools/polygons.ts +++ b/src/store/tools/polygons.ts @@ -184,11 +184,12 @@ export const usePolygonStore = defineAnnotationToolStore('polygon', () => { // --- serialization --- // function serialize(state: StateFile) { + if (!state.manifest.tools) return; state.manifest.tools.polygons = toolAPI.serializeTools(); } function deserialize(manifest: Manifest, dataIDMap: Record) { - toolAPI.deserializeTools(manifest.tools.polygons, dataIDMap); + toolAPI.deserializeTools(manifest.tools?.polygons, dataIDMap); } return { diff --git a/src/store/tools/rectangles.ts b/src/store/tools/rectangles.ts index adbb98a9d..ddf934b1b 100644 --- a/src/store/tools/rectangles.ts +++ b/src/store/tools/rectangles.ts @@ -33,11 +33,12 @@ export const useRectangleStore = defineAnnotationToolStore('rectangles', () => { // --- serialization --- // function serialize(state: StateFile) { + if (!state.manifest.tools) return; state.manifest.tools.rectangles = toolAPI.serializeTools(); } function deserialize(manifest: Manifest, dataIDMap: Record) { - toolAPI.deserializeTools(manifest.tools.rectangles, dataIDMap); + toolAPI.deserializeTools(manifest.tools?.rectangles, dataIDMap); } return { diff --git a/src/store/tools/rulers.ts b/src/store/tools/rulers.ts index f12a82cc4..b752e2d94 100644 --- a/src/store/tools/rulers.ts +++ b/src/store/tools/rulers.ts @@ -53,11 +53,12 @@ export const useRulerStore = defineAnnotationToolStore('ruler', () => { // --- serialization --- // function serialize(state: StateFile) { + if (!state.manifest.tools) return; state.manifest.tools.rulers = serializeTools(); } function deserialize(manifest: Manifest, dataIDMap: Record) { - deserializeTools(manifest.tools.rulers, dataIDMap); + deserializeTools(manifest.tools?.rulers, dataIDMap); } return { diff --git a/src/store/view-configs.ts b/src/store/view-configs.ts index adb60dfdb..81b6eb239 100644 --- a/src/store/view-configs.ts +++ b/src/store/view-configs.ts @@ -67,6 +67,8 @@ export const useViewConfigStore = defineStore('viewConfig', () => { manifest: StateFile['manifest'], dataIDMap: Record ) => { + if (!manifest.viewByID) return; + Object.entries(manifest.viewByID).forEach(([viewID, view]) => { if (view.config) { deserialize(viewID, view.config, dataIDMap); diff --git a/src/store/view-configs/common.ts 
b/src/store/view-configs/common.ts index b38495374..c4a67dc0d 100644 --- a/src/store/view-configs/common.ts +++ b/src/store/view-configs/common.ts @@ -12,8 +12,12 @@ const serializeViewConfig = < viewConfigs: DoubleKeyRecord, viewConfigStateKey: K ) => { - const dataIDs = stateFile.manifest.datasets.map((dataset) => dataset.id); - const views = Object.values(stateFile.manifest.viewByID); + const datasets = stateFile.manifest.datasets; + const viewByID = stateFile.manifest.viewByID; + if (!datasets || !viewByID) return; + + const dataIDs = datasets.map((dataset) => dataset.id); + const views = Object.values(viewByID); views.forEach((view) => { dataIDs.forEach((dataID) => { diff --git a/src/store/views.ts b/src/store/views.ts index 1e093695c..9e3d20071 100644 --- a/src/store/views.ts +++ b/src/store/views.ts @@ -10,7 +10,7 @@ import { parseNamedLayouts, type LayoutConfig, } from '@/src/utils/layoutParsing'; -import type { StateFile } from '../io/state-file/schema'; +import type { Manifest, StateFile } from '../io/state-file/schema'; const DEFAULT_VIEW_INIT: ViewInfoInit = { type: '2D', @@ -307,25 +307,30 @@ export const useViewStore = defineStore('view', () => { manifest.viewByID = viewByID; } - function deserialize( - manifest: StateFile['manifest'], - dataIDMap: Record - ) { - setLayout(manifest.layout); + function deserialize(manifest: Manifest, dataIDMap: Record) { + if (manifest.layout) { + setLayout(manifest.layout); + } setActiveView(manifest.activeView); - isActiveViewMaximized.value = manifest.isActiveViewMaximized; - layoutSlots.value = manifest.layoutSlots; - - viewIDs.value.forEach((key) => { - delete viewByID[key]; - }); + if (manifest.isActiveViewMaximized !== undefined) { + isActiveViewMaximized.value = manifest.isActiveViewMaximized; + } + if (manifest.layoutSlots) { + layoutSlots.value = manifest.layoutSlots; + } - Object.entries(manifest.viewByID).forEach(([id, view]) => { - viewByID[id] = { - ...view, - dataID: view.dataID ? dataIDMap[view.dataID] : null, - } as unknown as ViewInfo; - }); + if (manifest.viewByID) { + viewIDs.value.forEach((key) => { + delete viewByID[key]; + }); + + Object.entries(manifest.viewByID).forEach(([id, view]) => { + viewByID[id] = { + ...view, + dataID: view.dataID ? 
dataIDMap[view.dataID] : null, + } as unknown as ViewInfo; + }); + } } // initialization diff --git a/tests/specs/sparse-manifest.e2e.ts b/tests/specs/sparse-manifest.e2e.ts new file mode 100644 index 000000000..1341b5c1e --- /dev/null +++ b/tests/specs/sparse-manifest.e2e.ts @@ -0,0 +1,122 @@ +import * as path from 'path'; +import * as fs from 'fs'; +import { cleanuptotal } from 'wdio-cleanuptotal-service'; +import JSZip from 'jszip'; +import { TEMP_DIR } from '../../wdio.shared.conf'; +import { volViewPage } from '../pageobjects/volview.page'; +import { MINIMAL_DICOM } from './configTestUtils'; +import { downloadFile } from './utils'; + +async function writeManifestToZip(manifest: unknown, fileName: string) { + const filePath = path.join(TEMP_DIR, fileName); + const manifestString = JSON.stringify(manifest, null, 2); + + const zip = new JSZip(); + zip.file('manifest.json', manifestString); + const data = await zip.generateAsync({ type: 'nodebuffer' }); + + await fs.promises.writeFile(filePath, data); + cleanuptotal.addCleanup(async () => { + fs.unlinkSync(filePath); + }); + + return filePath; +} + +async function openVolViewPage(fileName: string) { + const urlParams = `?urls=[tmp/${fileName}]`; + await volViewPage.open(urlParams); + await volViewPage.waitForViews(); +} + +describe('Sparse manifest.json', () => { + it('loads manifest with only URL data source', async () => { + await downloadFile(MINIMAL_DICOM.url, MINIMAL_DICOM.name); + + const sparseManifest = { + version: '6.1.0', + dataSources: [ + { + id: 0, + type: 'uri', + uri: `/tmp/${MINIMAL_DICOM.name}`, + }, + ], + }; + + const fileName = 'sparse-url-only.volview.zip'; + await writeManifestToZip(sparseManifest, fileName); + await openVolViewPage(fileName); + + const notifications = await volViewPage.getNotificationsCount(); + expect(notifications).toEqual(0); + }); + + it('loads sparse manifest with tools section (rectangle)', async () => { + await downloadFile(MINIMAL_DICOM.url, MINIMAL_DICOM.name); + + const sparseManifest = { + version: '6.1.0', + dataSources: [ + { + id: 0, + type: 'uri', + uri: `/tmp/${MINIMAL_DICOM.name}`, + }, + ], + tools: { + rectangles: { + tools: [ + { + imageID: '0', + frameOfReference: { + planeOrigin: [0, 0, 0], + planeNormal: [0, 0, 1], + }, + slice: 0, + firstPoint: [10, 10, 0], + secondPoint: [50, 50, 0], + label: 'default', + }, + ], + labels: { + default: { + color: '#ff0000', + strokeWidth: 2, + }, + }, + }, + }, + }; + + const fileName = 'sparse-url-rectangle.volview.zip'; + await writeManifestToZip(sparseManifest, fileName); + await openVolViewPage(fileName); + + const notifications = await volViewPage.getNotificationsCount(); + expect(notifications).toEqual(0); + + const annotationsTab = await $( + 'button[data-testid="module-tab-Annotations"]' + ); + await annotationsTab.click(); + + const measurementsTab = await $('button.v-tab*=Measurements'); + await measurementsTab.waitForClickable(); + await measurementsTab.click(); + + await browser.waitUntil( + async () => { + const rectangleEntries = await $$( + '.v-list-item i.mdi-vector-square.tool-icon' + ); + const count = await rectangleEntries.length; + return count >= 1; + }, + { + timeout: 5000, + timeoutMsg: 'Rectangle tool not found in measurements list', + } + ); + }); +}); From 7a7445e646fcfd9b5aa9da77fb3e045612674627 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sat, 29 Nov 2025 19:03:25 -0500 Subject: [PATCH 02/25] fix: report errors and support multiple volumes in state file restoration - Show error messages when data sources 
fail to load during state restoration - Support multiple volumes from a single data source (e.g., DICOM folders) - Use getURLBasename for consistent name extraction with URL params flow --- src/io/import/processors/restoreStateFile.ts | 51 +++++++++++++++----- 1 file changed, 39 insertions(+), 12 deletions(-) diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index 8a58ee928..a0c6b2ea1 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -12,7 +12,7 @@ import { } from '@/src/io/import/common'; import { DataSource } from '@/src/io/import/dataSource'; import { MANIFEST, isStateFile } from '@/src/io/state-file'; -import { partition } from '@/src/utils'; +import { partition, getURLBasename } from '@/src/utils'; import { pipe } from '@/src/utils/functional'; import { makeDefaultSegmentGroupName, @@ -28,8 +28,11 @@ import openUriStream from '@/src/io/import/processors/openUriStream'; import updateUriType from '@/src/io/import/processors/updateUriType'; import downloadStream from '@/src/io/import/processors/downloadStream'; import { FileEntry } from '@/src/io/types'; +import { FILE_EXT_TO_MIME } from '@/src/io/mimeTypes'; import { useViewStore } from '@/src/store/views'; import { useViewConfigStore } from '@/src/store/view-configs'; +import { useMessageStore } from '@/src/store/messages'; +import { getDataSourceName } from '@/src/io/import/dataSource'; const LABELMAP_PALETTE_2_1_0 = { '1': { @@ -319,19 +322,22 @@ type ResolvingImportHandler = ChainHandler< ImportContext >; +const downloadNonDicomStream: ResolvingImportHandler = ( + dataSource, + context +) => { + if (dataSource.type === 'uri' && dataSource.mime === FILE_EXT_TO_MIME.dcm) { + return Skip; + } + return downloadStream(dataSource, context); +}; + const resolvingHandlers: ResolvingImportHandler[] = [ openUriStream, - - // updating the file/uri type should be first step in the pipeline updateFileMimeType, updateUriType, - - // stream handling - only download, not DICOM processing - // DICOM processing happens in the main import pipeline - downloadStream, - + downloadNonDicomStream, extractArchiveTarget, - (dataSource) => { return { type: 'resolved', dataSource }; }, @@ -373,13 +379,15 @@ async function rebuildDataSources( parent, }; } - case 'uri': + case 'uri': { + const defaultName = getURLBasename(serialized.uri) || serialized.uri; return { type: 'uri', uri: serialized.uri, - name: serialized.name ?? serialized.uri, + name: serialized.name ?? defaultName, mime: serialized.mime, }; + } case 'collection': { // these sources are no longer leaves serialized.sources.forEach((id) => { @@ -482,6 +490,7 @@ async function restoreDatasets( ); const stateIDToStoreID: Record = {}; + const messageStore = useMessageStore(); await Promise.all( [...leaves].map(async (leafId) => { @@ -492,9 +501,27 @@ async function restoreDatasets( const dataResults = importResults.filter( (r): r is typeof r & { type: 'data' } => r.type === 'data' ); + const errorResults = importResults.filter((r) => r.type === 'error'); + + if (errorResults.length > 0) { + const sourceName = getDataSourceName(dataSource) ?? 'Unknown source'; + const errorMessages = errorResults + .map((r) => ('error' in r ? 
r.error.message : 'Unknown error')) + .join(', '); + messageStore.addError( + `Failed to load data source: ${sourceName}`, + errorMessages + ); + } if (dataResults.length > 0) { - stateIDToStoreID[dataSourceIDToStateID[leafId]] = dataResults[0].dataID; + const stateID = dataSourceIDToStateID[leafId]; + stateIDToStoreID[stateID] = dataResults[0].dataID; + + dataResults.slice(1).forEach((result, index) => { + const generatedStateID = `${stateID}_${index + 1}`; + stateIDToStoreID[generatedStateID] = result.dataID; + }); } }) ); From bdb7ad78d1dc3b14585fdeec729a042b7225e25e Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sat, 29 Nov 2025 20:56:37 -0500 Subject: [PATCH 03/25] fix: unify state file restoration with main import pipeline Integrate state file restoration into the main importDataSources pipeline for better parallel streaming support. Uses 3-phase restoration: - Phase 1: Set up view layout immediately (without data bindings) - Phase 2: Bind views incrementally as each data leaf completes - Phase 3: Restore tools, segments, and layers after all data loaded Key changes: - Remove duplicate resolvingHandlers chain from restoreStateFile - Add StateFileContext to track restoration progress across pipeline - Add stateFileLeaf metadata to data sources for tracking - Add deserializeLayout() and bindViewsToData() to view store - Handle URI fallback when archives can't be deserialized directly --- src/io/import/common.ts | 13 + src/io/import/dataSource.ts | 15 +- src/io/import/importDataSources.ts | 63 ++++- src/io/import/processors/restoreStateFile.ts | 281 +++++++++---------- src/store/views.ts | 22 +- wdio.shared.conf.ts | 2 +- 6 files changed, 237 insertions(+), 159 deletions(-) diff --git a/src/io/import/common.ts b/src/io/import/common.ts index ce6599adf..5c2e6de2c 100644 --- a/src/io/import/common.ts +++ b/src/io/import/common.ts @@ -4,6 +4,8 @@ import { ARCHIVE_FILE_TYPES } from '@/src/io/mimeTypes'; import { Awaitable } from '@vueuse/core'; import { Config } from '@/src/io/import/configJson'; import { ChainHandler } from '@/src/utils/evaluateChain'; +import type { Manifest } from '@/src/io/state-file/schema'; +import type { FileEntry } from '@/src/io/types'; export interface LoadableResult { type: 'data'; @@ -99,6 +101,15 @@ export const asOkayResult = (dataSource: DataSource): OkayResult => ({ export type ArchiveContents = Record; export type ArchiveCache = Map>; +export interface StateFileContext { + manifest: Manifest; + stateFiles: FileEntry[]; + stateIDToStoreID: Map; + pendingLeafCount: number; + onLeafImported: (stateID: string, storeID: string) => void; + onAllLeavesImported: () => Promise; +} + export interface ImportContext { // Caches URL responses fetchFileCache?: FetchCache; @@ -113,6 +124,8 @@ export interface ImportContext { importDataSources?: ( dataSources: DataSource[] ) => Promise; + // State file restoration context for 3-phase restoration + stateFileContext?: StateFileContext; } export type ImportHandler = ChainHandler< diff --git a/src/io/import/dataSource.ts b/src/io/import/dataSource.ts index aa4d233dd..53f305fe7 100644 --- a/src/io/import/dataSource.ts +++ b/src/io/import/dataSource.ts @@ -53,18 +53,19 @@ export interface ChunkSource { mime: string; } +export interface StateFileLeaf { + stateID: string; +} + /** * Represents a source of data. * * The parent chain denotes the provenance for each step of the data source resolution. 
*/ -export type DataSource = { parent?: DataSource } & ( - | FileSource - | UriSource - | ArchiveSource - | ChunkSource - | CollectionSource -); +export type DataSource = { + parent?: DataSource; + stateFileLeaf?: StateFileLeaf; +} & (FileSource | UriSource | ArchiveSource | ChunkSource | CollectionSource); /** * Creates a DataSource from a single file. diff --git a/src/io/import/importDataSources.ts b/src/io/import/importDataSources.ts index 72d7c11da..66fa5e97e 100644 --- a/src/io/import/importDataSources.ts +++ b/src/io/import/importDataSources.ts @@ -1,6 +1,7 @@ import { ImportHandler, ImportResult, + ImportContext, asErrorResult, asLoadableResult, ConfigResult, @@ -10,7 +11,11 @@ import { ImportDataSourcesResult, asIntermediateResult, } from '@/src/io/import/common'; -import { DataSource, ChunkSource } from '@/src/io/import/dataSource'; +import { + DataSource, + ChunkSource, + StateFileLeaf, +} from '@/src/io/import/dataSource'; import handleDicomFile from '@/src/io/import/processors/handleDicomFile'; import extractArchive from '@/src/io/import/processors/extractArchive'; import extractArchiveTarget from '@/src/io/import/processors/extractArchiveTarget'; @@ -43,7 +48,14 @@ const unhandledResource: ImportHandler = (dataSource) => { const handleCollections: ImportHandler = (dataSource) => { if (dataSource.type !== 'collection') return Skip; - return asIntermediateResult(dataSource.sources); + // Propagate stateFileLeaf to sources so DICOM volumes can be mapped back to state file datasets + const sources = dataSource.stateFileLeaf + ? dataSource.sources.map((src) => ({ + ...src, + stateFileLeaf: dataSource.stateFileLeaf, + })) + : dataSource.sources; + return asIntermediateResult(sources); }; function isSelectable(result: ImportResult): result is LoadableVolumeResult { @@ -62,6 +74,39 @@ const applyConfigsPostState = ( } }); +function findStateFileLeaf(dataSource: DataSource): StateFileLeaf | undefined { + let current: DataSource | undefined = dataSource; + while (current) { + if (current.stateFileLeaf) return current.stateFileLeaf; + current = current.parent; + } + // For collections (DICOM volumes), check the first source's parent chain + if (dataSource.type === 'collection' && dataSource.sources.length > 0) { + return findStateFileLeaf(dataSource.sources[0]); + } + return undefined; +} + +async function handleStateFileResult( + result: LoadableResult, + importContext: ImportContext +) { + const stateLeaf = findStateFileLeaf(result.dataSource); + if (stateLeaf && importContext.stateFileContext) { + const ctx = importContext.stateFileContext; + ctx.stateIDToStoreID.set(stateLeaf.stateID, result.dataID); + + // Phase 2: Immediately bind view to this data so user sees streaming + ctx.onLeafImported(stateLeaf.stateID, result.dataID); + + ctx.pendingLeafCount--; + if (ctx.pendingLeafCount === 0) { + // Phase 3: Restore tools/segments after all data loaded + await ctx.onAllLeavesImported(); + } + } +} + async function importDicomChunkSources(sources: ChunkSource[]) { if (sources.length === 0) return []; @@ -98,7 +143,7 @@ export async function importDataSources( while (cleanupHandlers.length) cleanupHandlers.pop()!(); }; - const importContext = { + const importContext: ImportContext = { fetchFileCache: new Map(), onCleanup, importDataSources, @@ -174,10 +219,13 @@ export async function importDataSources( } break; case 'ok': - case 'data': case 'error': results.push(result); break; + case 'data': + results.push(result); + await handleStateFileResult(result, importContext); + break; default: 
throw new Error(`Invalid result: ${result}`); } @@ -193,7 +241,12 @@ export async function importDataSources( ); try { - results.push(...(await importDicomChunkSources(dicomChunkSources))); + const dicomResults = await importDicomChunkSources(dicomChunkSources); + results.push(...dicomResults); + // Handle state file results for DICOM volumes + for (const dicomResult of dicomResults) { + await handleStateFileResult(dicomResult, importContext); + } } catch (err) { const errorSource = dicomChunkSources.length === 1 diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index a0c6b2ea1..5cfc1c933 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -5,10 +5,9 @@ import { } from '@/src/io/state-file/schema'; import { asErrorResult, - asOkayResult, - ImportContext, + asIntermediateResult, ImportHandler, - ImportResult, + StateFileContext, } from '@/src/io/import/common'; import { DataSource } from '@/src/io/import/dataSource'; import { MANIFEST, isStateFile } from '@/src/io/state-file'; @@ -21,18 +20,10 @@ import { import { useToolStore } from '@/src/store/tools'; import { useLayersStore } from '@/src/store/datasets-layers'; import { extractFilesFromZip } from '@/src/io/zip'; -import updateFileMimeType from '@/src/io/import/processors/updateFileMimeType'; -import extractArchiveTarget from '@/src/io/import/processors/extractArchiveTarget'; -import { ChainHandler, evaluateChain, Skip } from '@/src/utils/evaluateChain'; -import openUriStream from '@/src/io/import/processors/openUriStream'; -import updateUriType from '@/src/io/import/processors/updateUriType'; -import downloadStream from '@/src/io/import/processors/downloadStream'; -import { FileEntry } from '@/src/io/types'; -import { FILE_EXT_TO_MIME } from '@/src/io/mimeTypes'; +import type { FileEntry } from '@/src/io/types'; +import { Skip } from '@/src/utils/evaluateChain'; import { useViewStore } from '@/src/store/views'; import { useViewConfigStore } from '@/src/store/view-configs'; -import { useMessageStore } from '@/src/store/messages'; -import { getDataSourceName } from '@/src/io/import/dataSource'; const LABELMAP_PALETTE_2_1_0 = { '1': { @@ -311,39 +302,29 @@ const migrateManifest = (manifestString: string) => { ); }; -type ResolvedResult = { - type: 'resolved'; - dataSource: DataSource; -}; - -type ResolvingImportHandler = ChainHandler< - DataSource, - ImportResult | ResolvedResult, - ImportContext ->; - -const downloadNonDicomStream: ResolvingImportHandler = ( - dataSource, - context -) => { - if (dataSource.type === 'uri' && dataSource.mime === FILE_EXT_TO_MIME.dcm) { - return Skip; +function findRootUriAncestors( + id: number, + byId: Record +): DataSourceType[] { + const src = byId[id]; + if (!src) return []; + if (src.type === 'uri') return [src]; + if ('parent' in src && src.parent !== undefined) { + return findRootUriAncestors(src.parent, byId); } - return downloadStream(dataSource, context); -}; - -const resolvingHandlers: ResolvingImportHandler[] = [ - openUriStream, - updateFileMimeType, - updateUriType, - downloadNonDicomStream, - extractArchiveTarget, - (dataSource) => { - return { type: 'resolved', dataSource }; - }, -]; + if (src.type === 'collection') { + const uris = new Map(); + src.sources.forEach((sourceId) => { + findRootUriAncestors(sourceId, byId).forEach((uri) => { + uris.set(uri.id, uri); + }); + }); + return [...uris.values()]; + } + return []; +} -async function rebuildDataSources( +function 
rebuildDataSources( serializedDataSources: DataSourceType[], fileIDToFile: Record ) { @@ -358,7 +339,7 @@ async function rebuildDataSources( const deserialize = ( serialized: (typeof serializedDataSources)[number] - ): DataSource => { + ): DataSource | null => { const { type } = serialized; switch (type) { case 'file': @@ -369,10 +350,12 @@ async function rebuildDataSources( }; case 'archive': { const parent = dataSourceCache[serialized.parent]; - if (!parent) - throw new Error('Could not find the parent of an archive source'); - if (parent.type !== 'file') - throw new Error('Archive source parent is not a file'); + if (!parent) { + return null; + } + if (parent.type !== 'file') { + return null; + } return { type: 'archive', path: serialized.path, @@ -389,13 +372,15 @@ async function rebuildDataSources( }; } case 'collection': { - // these sources are no longer leaves serialized.sources.forEach((id) => { leaves.delete(id); }); - const sources = serialized.sources.map((id) => dataSourceCache[id]); - if (sources.some((src) => !src)) - throw new Error('Could not deserialize a collection source'); + const sources = serialized.sources + .map((id) => dataSourceCache[id]) + .filter((src): src is DataSource => src != null); + if (sources.length === 0) { + return null; + } return { type: 'collection', sources, @@ -408,9 +393,6 @@ async function rebuildDataSources( } }; - // serializedDataSources should be topologically ordered by ancestors first - // and descendants last. This is established in - // datasets.ts/serializeDataSource() for (let i = 0; i < serializedDataSources.length; i++) { const serializedSrc = serializedDataSources[i]; @@ -418,40 +400,27 @@ async function rebuildDataSources( continue; } - let dataSource = deserialize(serializedSrc); + const dataSource = deserialize(serializedSrc); + + if (!dataSource) { + const rootUris = findRootUriAncestors(serializedSrc.id, byId); + leaves.delete(serializedSrc.id); + rootUris.forEach((uri) => leaves.add(uri.id)); + continue; + } if (serializedSrc.parent) { dataSource.parent = dataSourceCache[serializedSrc.parent]; leaves.delete(serializedSrc.parent); } - let stillResolving = true; - while (stillResolving) { - const result = await evaluateChain(dataSource, resolvingHandlers); - - stillResolving = result.type !== 'resolved'; - if (!stillResolving) break; - - if (result.type !== 'intermediate') { - throw new Error( - 'Resolving pipeline does not produce intermediate results!' - ); - } - - dataSource = result.dataSources[0]; - } - dataSourceCache[serializedSrc.id] = dataSource; } - return { dataSourceCache, leaves }; + return { dataSourceCache, leaves, byId }; } -async function restoreDatasets( - manifest: Manifest, - datasetFiles: FileEntry[], - context?: ImportContext -) { +function prepareLeafDataSources(manifest: Manifest, datasetFiles: FileEntry[]) { const { dataSources } = manifest; const datasets = manifest.datasets ?? @@ -484,49 +453,59 @@ async function restoreDatasets( {} ); - const { dataSourceCache, leaves } = await rebuildDataSources( + const { dataSourceCache, leaves, byId } = rebuildDataSources( dataSources, fileIDToFile ); - const stateIDToStoreID: Record = {}; - const messageStore = useMessageStore(); - - await Promise.all( - [...leaves].map(async (leafId) => { + const leafDataSources = [...leaves] + .filter((leafId) => leafId in dataSourceCache) + .map((leafId) => { const dataSource = dataSourceCache[leafId]; - const importResults = - (await context?.importDataSources?.([dataSource])) ?? 
[]; - - const dataResults = importResults.filter( - (r): r is typeof r & { type: 'data' } => r.type === 'data' - ); - const errorResults = importResults.filter((r) => r.type === 'error'); - - if (errorResults.length > 0) { - const sourceName = getDataSourceName(dataSource) ?? 'Unknown source'; - const errorMessages = errorResults - .map((r) => ('error' in r ? r.error.message : 'Unknown error')) - .join(', '); - messageStore.addError( - `Failed to load data source: ${sourceName}`, - errorMessages - ); - } - if (dataResults.length > 0) { - const stateID = dataSourceIDToStateID[leafId]; - stateIDToStoreID[stateID] = dataResults[0].dataID; + let stateID = dataSourceIDToStateID[leafId]; - dataResults.slice(1).forEach((result, index) => { - const generatedStateID = `${stateID}_${index + 1}`; - stateIDToStoreID[generatedStateID] = result.dataID; + if (!stateID) { + const matchingDataset = datasets.find((ds) => { + const rootUris = findRootUriAncestors(ds.dataSourceId, byId); + return rootUris.some((uri) => uri.id === leafId); }); + if (matchingDataset) { + stateID = matchingDataset.id; + } } - }) + + return { + ...dataSource, + stateFileLeaf: { stateID }, + }; + }); + + return leafDataSources; +} + +async function completeStateFileRestore(ctx: StateFileContext) { + const { manifest, stateFiles, stateIDToStoreID } = ctx; + const stateIDToStoreIDRecord = Object.fromEntries(stateIDToStoreID); + + // Restore view configs (handles missing configs gracefully) + useViewConfigStore().deserializeAll(manifest, stateIDToStoreIDRecord); + + // Restore the labelmaps + const segmentGroupIDMap = await useSegmentGroupStore().deserialize( + manifest, + stateFiles, + stateIDToStoreIDRecord ); - return stateIDToStoreID; + // Restore the tools (each tool handles missing data gracefully) + useToolStore().deserialize( + manifest, + segmentGroupIDMap, + stateIDToStoreIDRecord + ); + + useLayersStore().deserialize(manifest, stateIDToStoreIDRecord); } const restoreStateFile: ImportHandler = async (dataSource, context) => { @@ -554,45 +533,61 @@ const restoreStateFile: ImportHandler = async (dataSource, context) => { ); } - // Set layout if provided, otherwise use existing default layout - if (manifest.layout) { - useViewStore().setLayout(manifest.layout); - } - - const stateIDToStoreID = await restoreDatasets( - manifest, - restOfStateFile, - context - ); - - // Restore the views (handles missing viewByID gracefully) + // Phase 1: Set up view layout immediately (without data bindings) const viewStore = useViewStore(); - viewStore.deserialize(manifest, stateIDToStoreID); + viewStore.deserializeLayout(manifest); + + // Prepare leaf data sources with state file tags + const leafDataSources = prepareLeafDataSources(manifest, restOfStateFile); + + if (leafDataSources.length === 0) { + // No datasets to import, complete restoration immediately + await completeStateFileRestore({ + manifest, + stateFiles: restOfStateFile, + stateIDToStoreID: new Map(), + pendingLeafCount: 0, + onLeafImported: () => {}, + onAllLeavesImported: async () => {}, + }); - // When viewByID is not in manifest, assign first dataset to all views - if (!manifest.viewByID) { - const firstDataID = Object.values(stateIDToStoreID)[0]; - if (firstDataID) { - viewStore.setDataForAllViews(firstDataID); - } + // When viewByID is not in manifest, there's no data to assign + return asIntermediateResult([]); } - // Restore view configs (handles missing configs gracefully) - useViewConfigStore().deserializeAll(manifest, stateIDToStoreID); - - // Restore the labelmaps - 
const segmentGroupIDMap = await useSegmentGroupStore().deserialize( + // Set up state file context for phase 2 and 3 callbacks + const stateFileContext: StateFileContext = { manifest, - restOfStateFile, - stateIDToStoreID - ); - - // Restore the tools (each tool handles missing data gracefully) - useToolStore().deserialize(manifest, segmentGroupIDMap, stateIDToStoreID); + stateFiles: restOfStateFile, + stateIDToStoreID: new Map(), + pendingLeafCount: leafDataSources.length, + onLeafImported: (stateID: string, storeID: string) => { + // Phase 2: Bind view to data as each leaf completes + viewStore.bindViewsToData(stateID, storeID, manifest); + }, + onAllLeavesImported: async () => { + // Phase 3: Restore segment groups, tools, layers after all data loaded + await completeStateFileRestore(stateFileContext); + + // When viewByID is not in manifest, assign first dataset to all views + if (!manifest.viewByID) { + const firstStoreID = stateFileContext.stateIDToStoreID + .values() + .next().value; + if (firstStoreID) { + viewStore.setDataForAllViews(firstStoreID); + } + } + }, + }; - useLayersStore().deserialize(manifest, stateIDToStoreID); + // Store context for use by main pipeline + if (context) { + context.stateFileContext = stateFileContext; + } - return asOkayResult(dataSource); + // Return leaf data sources to be processed by main pipeline + return asIntermediateResult(leafDataSources); } return Skip; }; diff --git a/src/store/views.ts b/src/store/views.ts index 9e3d20071..e6825d480 100644 --- a/src/store/views.ts +++ b/src/store/views.ts @@ -307,7 +307,7 @@ export const useViewStore = defineStore('view', () => { manifest.viewByID = viewByID; } - function deserialize(manifest: Manifest, dataIDMap: Record) { + function deserializeLayout(manifest: Manifest) { if (manifest.layout) { setLayout(manifest.layout); } @@ -327,12 +327,27 @@ export const useViewStore = defineStore('view', () => { Object.entries(manifest.viewByID).forEach(([id, view]) => { viewByID[id] = { ...view, - dataID: view.dataID ? dataIDMap[view.dataID] : null, + dataID: null, } as unknown as ViewInfo; }); } } + function bindViewsToData( + stateID: string, + storeID: string, + manifest: Manifest + ) { + if (!manifest.viewByID) return; + + Object.entries(manifest.viewByID).forEach(([id, view]) => { + if (view.dataID === stateID && viewByID[id]) { + viewByID[id].dataID = storeID; + ViewDataChangeEvent.trigger(id, storeID); + } + }); + } + // initialization firstLayout.views.forEach((viewInit) => { @@ -375,7 +390,8 @@ export const useViewStore = defineStore('view', () => { removeDataFromViews, toggleActiveViewMaximized, serialize, - deserialize, + deserializeLayout, + bindViewsToData, ViewDataChangeEvent, LayoutViewReplacedEvent, }; diff --git a/wdio.shared.conf.ts b/wdio.shared.conf.ts index b1552078c..ae285ad38 100644 --- a/wdio.shared.conf.ts +++ b/wdio.shared.conf.ts @@ -7,7 +7,7 @@ export const WINDOW_SIZE = [1200, 800] as const; export const TEST_PORT = 4567; // for slow connections try: // DOWNLOAD_TIMEOUT=60000 && npm run test:e2e:dev -export const DOWNLOAD_TIMEOUT = Number(process.env.DOWNLOAD_TIMEOUT ?? 60000); +export const DOWNLOAD_TIMEOUT = Number(process.env.DOWNLOAD_TIMEOUT ?? 
20000); const ROOT = projectRoot(); const TMP = '.tmp/'; From 024b5dea45453cb8eac3d10a826d3ac510c69861 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 09:20:48 -0500 Subject: [PATCH 04/25] refactor(e2e): consolidate test utilities and fix slow state-manifest test - Move writeManifestToZip and openVolViewPage to shared utils.ts - Add browser.reloadSession() between tests to avoid stale element issues - Use waitForDisplayed instead of waitUntil polling loop - Fix remote-manifest test to not use openVolViewPage for failure case --- tests/specs/remote-manifest.e2e.ts | 4 +- tests/specs/sparse-manifest.e2e.ts | 36 +--------------- tests/specs/state-manifest.e2e.ts | 69 ++++-------------------------- tests/specs/utils.ts | 30 +++++++++++-- 4 files changed, 40 insertions(+), 99 deletions(-) diff --git a/tests/specs/remote-manifest.e2e.ts b/tests/specs/remote-manifest.e2e.ts index 382ea7af8..154760d19 100644 --- a/tests/specs/remote-manifest.e2e.ts +++ b/tests/specs/remote-manifest.e2e.ts @@ -9,7 +9,9 @@ describe('VolView loading of remoteManifest.json', () => { }; const fileName = 'remoteFilesBadUrl.json'; await writeManifestToFile(manifest, fileName); - await openVolViewPage(fileName); + + const urlParams = `?urls=[tmp/${fileName}]`; + await volViewPage.open(urlParams); await volViewPage.waitForNotification(); }); diff --git a/tests/specs/sparse-manifest.e2e.ts b/tests/specs/sparse-manifest.e2e.ts index 1341b5c1e..3c15c4580 100644 --- a/tests/specs/sparse-manifest.e2e.ts +++ b/tests/specs/sparse-manifest.e2e.ts @@ -1,33 +1,5 @@ -import * as path from 'path'; -import * as fs from 'fs'; -import { cleanuptotal } from 'wdio-cleanuptotal-service'; -import JSZip from 'jszip'; -import { TEMP_DIR } from '../../wdio.shared.conf'; -import { volViewPage } from '../pageobjects/volview.page'; import { MINIMAL_DICOM } from './configTestUtils'; -import { downloadFile } from './utils'; - -async function writeManifestToZip(manifest: unknown, fileName: string) { - const filePath = path.join(TEMP_DIR, fileName); - const manifestString = JSON.stringify(manifest, null, 2); - - const zip = new JSZip(); - zip.file('manifest.json', manifestString); - const data = await zip.generateAsync({ type: 'nodebuffer' }); - - await fs.promises.writeFile(filePath, data); - cleanuptotal.addCleanup(async () => { - fs.unlinkSync(filePath); - }); - - return filePath; -} - -async function openVolViewPage(fileName: string) { - const urlParams = `?urls=[tmp/${fileName}]`; - await volViewPage.open(urlParams); - await volViewPage.waitForViews(); -} +import { downloadFile, openVolViewPage, writeManifestToZip } from './utils'; describe('Sparse manifest.json', () => { it('loads manifest with only URL data source', async () => { @@ -47,9 +19,6 @@ describe('Sparse manifest.json', () => { const fileName = 'sparse-url-only.volview.zip'; await writeManifestToZip(sparseManifest, fileName); await openVolViewPage(fileName); - - const notifications = await volViewPage.getNotificationsCount(); - expect(notifications).toEqual(0); }); it('loads sparse manifest with tools section (rectangle)', async () => { @@ -93,9 +62,6 @@ describe('Sparse manifest.json', () => { await writeManifestToZip(sparseManifest, fileName); await openVolViewPage(fileName); - const notifications = await volViewPage.getNotificationsCount(); - expect(notifications).toEqual(0); - const annotationsTab = await $( 'button[data-testid="module-tab-Annotations"]' ); diff --git a/tests/specs/state-manifest.e2e.ts b/tests/specs/state-manifest.e2e.ts index 
6ab92db55..78d953e5b 100644 --- a/tests/specs/state-manifest.e2e.ts +++ b/tests/specs/state-manifest.e2e.ts @@ -1,53 +1,7 @@ import * as path from 'path'; -import * as fs from 'fs'; -import { cleanuptotal } from 'wdio-cleanuptotal-service'; -import JSZip from 'jszip'; -import { FIXTURES, TEMP_DIR } from '../../wdio.shared.conf'; +import { FIXTURES, WINDOW_SIZE } from '../../wdio.shared.conf'; import { volViewPage } from '../pageobjects/volview.page'; - -async function writeManifestToZip(manifestPath: string, fileName: string) { - const filePath = path.join(TEMP_DIR, fileName); - const manifest = fs.readFileSync(manifestPath); - - const zip = new JSZip(); - zip.file('manifest.json', manifest); - const data = await zip.generateAsync({ type: 'nodebuffer' }); - - await fs.promises.writeFile(filePath, data); - cleanuptotal.addCleanup(async () => { - fs.unlinkSync(filePath); - }); - - return filePath; -} - -async function openVolViewPage(fileName: string) { - const urlParams = `?urls=[tmp/${fileName}]`; - await volViewPage.open(urlParams); - await volViewPage.waitForViews(); - const notifications = await volViewPage.getNotificationsCount(); - expect(notifications).toEqual(0); - - // Check that no placeholder overlays are visible (mdi-image-off icons) - // The overlays are in divs that are shown/hidden based on imageID - await browser.waitUntil( - async () => { - const visibleOverlayCount = await browser.execute(() => { - const imageOffIcons = document.querySelectorAll('i.mdi-image-off'); - return Array.from(imageOffIcons).filter((icon) => { - const parent = icon.closest('div.overlay'); - if (!parent) return false; - const style = window.getComputedStyle(parent); - return style.display !== 'none' && style.visibility !== 'hidden'; - }).length; - }); - return visibleOverlayCount === 0; - }, - { - timeoutMsg: 'Image placeholder overlays are still visible', - } - ); -} +import { openVolViewPage, writeManifestToZip } from './utils'; describe('State file manifest.json code', () => { it('has no errors loading version 5.0.1 manifest.json file ', async () => { @@ -61,24 +15,19 @@ describe('State file manifest.json code', () => { }); it('loads 5.0.1 manifest with axial layer layout', async () => { + await browser.reloadSession(); + await browser.setWindowSize(...WINDOW_SIZE); const manifestPath = path.join(FIXTURES, 'layer-axial.5-0-1.volview.json'); const fileName = 'temp-layer-axial.volview.zip'; await writeManifestToZip(manifestPath, fileName); await openVolViewPage(fileName); - // Switch to the Rendering tab - const renderingTab = await $('button[data-testid="module-tab-Rendering"]'); + const renderingTab = volViewPage.renderingModuleTab; await renderingTab.click(); - // Wait for and verify that the layer opacity slider is visible - await browser.waitUntil( - async () => { - const layerSlider = await $('[data-testid="layer-opacity-slider"]'); - return layerSlider.isDisplayed(); - }, - { - timeoutMsg: 'Layer opacity slider is not visible in the Rendering tab', - } - ); + const layerSlider = $('[data-testid="layer-opacity-slider"]'); + await layerSlider.waitForDisplayed({ + timeoutMsg: 'Layer opacity slider is not visible in the Rendering tab', + }); }); }); diff --git a/tests/specs/utils.ts b/tests/specs/utils.ts index 41c74e57e..0c1b809de 100644 --- a/tests/specs/utils.ts +++ b/tests/specs/utils.ts @@ -2,6 +2,7 @@ import * as path from 'path'; import * as fs from 'fs'; import { z } from 'zod'; import { cleanuptotal } from 'wdio-cleanuptotal-service'; +import JSZip from 'jszip'; import { TEMP_DIR } from 
'../../wdio.shared.conf'; import { volViewPage } from '../pageobjects/volview.page'; import { RemoteResource } from '../../src/io/manifest'; @@ -36,7 +37,7 @@ export const downloadFile = async (url: string, fileName: string) => { return savePath; }; -export async function writeManifestToFile(manifest: any, fileName: string) { +export async function writeManifestToFile(manifest: unknown, fileName: string) { const filePath = path.join(TEMP_DIR, fileName); await fs.promises.writeFile(filePath, JSON.stringify(manifest)); cleanuptotal.addCleanup(async () => { @@ -45,9 +46,34 @@ export async function writeManifestToFile(manifest: any, fileName: string) { return filePath; } +export async function writeManifestToZip( + manifest: unknown | string, + fileName: string +) { + const filePath = path.join(TEMP_DIR, fileName); + const manifestString = + typeof manifest === 'string' + ? fs.readFileSync(manifest) + : JSON.stringify(manifest, null, 2); + + const zip = new JSZip(); + zip.file('manifest.json', manifestString); + const data = await zip.generateAsync({ type: 'nodebuffer' }); + + await fs.promises.writeFile(filePath, data); + cleanuptotal.addCleanup(async () => { + fs.unlinkSync(filePath); + }); + + return filePath; +} + export async function openVolViewPage(fileName: string) { const urlParams = `?urls=[tmp/${fileName}]`; await volViewPage.open(urlParams); + await volViewPage.waitForViews(); + const notifications = await volViewPage.getNotificationsCount(); + expect(notifications).toEqual(0); } type RemoteResourceType = z.infer & { name: string }; @@ -61,11 +87,9 @@ export async function openUrls(urlsAndNames: Array) { const manifest = { resources, }; - // Use a unique filename to avoid race conditions when tests run in parallel const fileName = `openUrlsManifest_${Date.now()}_${Math.random() .toString(36) .substring(7)}.json`; await writeManifestToFile(manifest, fileName); await openVolViewPage(fileName); - await volViewPage.waitForViews(); } From 274197dfb941fa7e262c145a93050a77aa730803 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 09:20:55 -0500 Subject: [PATCH 05/25] refactor: extract state file migrations to dedicated module --- src/io/import/processors/restoreStateFile.ts | 284 +------------------ src/io/state-file/migrations.ts | 264 +++++++++++++++++ 2 files changed, 266 insertions(+), 282 deletions(-) create mode 100644 src/io/state-file/migrations.ts diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index 5cfc1c933..b79517d2b 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -12,11 +12,7 @@ import { import { DataSource } from '@/src/io/import/dataSource'; import { MANIFEST, isStateFile } from '@/src/io/state-file'; import { partition, getURLBasename } from '@/src/utils'; -import { pipe } from '@/src/utils/functional'; -import { - makeDefaultSegmentGroupName, - useSegmentGroupStore, -} from '@/src/store/segmentGroups'; +import { useSegmentGroupStore } from '@/src/store/segmentGroups'; import { useToolStore } from '@/src/store/tools'; import { useLayersStore } from '@/src/store/datasets-layers'; import { extractFilesFromZip } from '@/src/io/zip'; @@ -24,283 +20,7 @@ import type { FileEntry } from '@/src/io/types'; import { Skip } from '@/src/utils/evaluateChain'; import { useViewStore } from '@/src/store/views'; import { useViewConfigStore } from '@/src/store/view-configs'; - -const LABELMAP_PALETTE_2_1_0 = { - '1': { - value: 1, - name: 'Segment 1', - 
color: [153, 153, 0, 255], - }, - '2': { - value: 2, - name: 'Segment 2', - color: [76, 76, 0, 255], - }, - '3': { - value: 3, - name: 'Segment 3', - color: [255, 255, 0, 255], - }, - '4': { - value: 4, - name: 'Segment 4', - color: [0, 76, 0, 255], - }, - '5': { - value: 5, - name: 'Segment 5', - color: [0, 153, 0, 255], - }, - '6': { - value: 6, - name: 'Segment 6', - color: [0, 255, 0, 255], - }, - '7': { - value: 7, - name: 'Segment 7', - color: [76, 0, 0, 255], - }, - '8': { - value: 8, - name: 'Segment 8', - color: [153, 0, 0, 255], - }, - '9': { - value: 9, - name: 'Segment 9', - color: [255, 0, 0, 255], - }, - '10': { - value: 10, - name: 'Segment 10', - color: [0, 76, 76, 255], - }, - '11': { - value: 11, - name: 'Segment 11', - color: [0, 153, 153, 255], - }, - '12': { - value: 12, - name: 'Segment 12', - color: [0, 255, 255, 255], - }, - '13': { - value: 13, - name: 'Segment 13', - color: [0, 0, 76, 255], - }, - '14': { - value: 14, - name: 'Segment 14', - color: [0, 0, 153, 255], - }, -}; - -const migrateOrPass = - (versions: Array, migrationFunc: (manifest: any) => any) => - (inputManifest: any) => { - if (versions.includes(inputManifest.version)) { - return migrationFunc(inputManifest); - } - return inputManifest; - }; - -const migrateBefore210 = (inputManifest: any) => { - const manifest = JSON.parse(JSON.stringify(inputManifest)); - manifest.version = '2.1.0'; - return manifest; -}; - -const migrate210To300 = (inputManifest: any) => { - const manifest = JSON.parse(JSON.stringify(inputManifest)); - manifest.tools.paint.activeSegmentGroupID = - inputManifest.tools.paint.activeLabelmapID; - delete manifest.tools.paint.activeLabelmapID; - - const order = Object.keys(LABELMAP_PALETTE_2_1_0).map((key) => Number(key)); - manifest.labelMaps = inputManifest.labelMaps.map( - (labelMap: any, index: number) => ({ - id: labelMap.id, - path: labelMap.path, - metadata: { - parentImage: labelMap.parent, - name: makeDefaultSegmentGroupName('My Image', index), - segments: { - order, - byValue: LABELMAP_PALETTE_2_1_0, - }, - }, - }) - ); - - manifest.version = '3.0.0'; - return manifest; -}; - -const migrate501To600 = (inputManifest: any) => { - const manifest = JSON.parse(JSON.stringify(inputManifest)); - - // Convert views array to viewByID object - if (manifest.views && Array.isArray(manifest.views)) { - manifest.viewByID = {}; - manifest.views.forEach((view: any) => { - const migratedView = { ...view }; - - // Add required 'name' field if missing - if (!migratedView.name) { - migratedView.name = migratedView.id; - } - - // Convert 'props' to 'options' if present - if (migratedView.props) { - // Convert any non-string values in props to strings for options - migratedView.options = {}; - Object.entries(migratedView.props).forEach(([key, value]) => { - if (typeof value === 'string') { - migratedView.options[key] = value; - } else { - // Convert arrays and objects to JSON strings - migratedView.options[key] = JSON.stringify(value); - } - }); - delete migratedView.props; - } - - // Add orientation for 2D views based on the view ID - if (migratedView.type === '2D' && !migratedView.options) { - migratedView.options = {}; - } - if (migratedView.type === '2D') { - // Set orientation based on view ID (Coronal, Sagittal, Axial) - if (['Coronal', 'Sagittal', 'Axial'].includes(migratedView.id)) { - migratedView.options.orientation = migratedView.id; - } - } - - // Handle type conversion for Oblique views - if (migratedView.type === 'Oblique3D') { - migratedView.type = 'Oblique'; - } - - const 
configKeys = Object.keys(migratedView.config || {}); - const primarySelection = manifest.primarySelection; - - migratedView.dataID = null; - if (configKeys.length > 0) { - migratedView.dataID = - primarySelection && configKeys.includes(primarySelection) - ? primarySelection - : configKeys[0]; - } - - manifest.viewByID[migratedView.id] = migratedView; - }); - delete manifest.views; - } - - // Add missing fields with proper defaults - if (manifest.isActiveViewMaximized === undefined) { - manifest.isActiveViewMaximized = false; - } - - if (manifest.activeView === undefined) { - manifest.activeView = null; - } - - // Convert layout to layoutSlots and update layout structure - if (manifest.layout && !manifest.layoutSlots) { - const slots: string[] = []; - - // Extract all slot names and convert layout to new format - const convertLayoutItem = (item: any): any => { - if (typeof item === 'string') { - // This is a view name like "Coronal", "3D", etc. - const slotIndex = slots.length; - slots.push(item); - return { - type: 'slot', - slotIndex, - }; - } - if (item.direction && item.items) { - // This is a nested layout - return { - type: 'layout', - direction: item.direction, - items: item.items.map(convertLayoutItem), - }; - } - return item; - }; - - // Convert the root layout - if (manifest.layout.direction && manifest.layout.items) { - manifest.layout = { - direction: manifest.layout.direction, - items: manifest.layout.items.map(convertLayoutItem), - }; - } - - manifest.layoutSlots = slots; - } - - // Ensure parentToLayers exists as an array - if (!manifest.parentToLayers) { - manifest.parentToLayers = []; - } - - manifest.version = '6.0.0'; - return manifest; -}; - -const migrate600To610 = (inputManifest: any) => { - const manifest = JSON.parse(JSON.stringify(inputManifest)); - - const migrateDirection = (dir: 'H' | 'V'): 'row' | 'column' => { - return dir === 'H' ? 
'column' : 'row'; - }; - - const migrateLayout = (layout: any): any => { - if (!layout || typeof layout !== 'object') return layout; - - const migratedLayout = { ...layout }; - - if (layout.direction) { - migratedLayout.direction = migrateDirection(layout.direction); - } - - if (layout.items && Array.isArray(layout.items)) { - migratedLayout.items = layout.items.map((item: any) => { - if (item.type === 'layout') { - return migrateLayout(item); - } - return item; - }); - } - - return migratedLayout; - }; - - if (manifest.layout) { - manifest.layout = migrateLayout(manifest.layout); - } - - manifest.version = '6.1.0'; - return manifest; -}; - -const migrateManifest = (manifestString: string) => { - const inputManifest = JSON.parse(manifestString); - return pipe( - inputManifest, - migrateOrPass(['1.1.0', '1.0.0', '0.5.0'], migrateBefore210), - migrateOrPass(['2.1.0'], migrate210To300), - migrateOrPass(['5.0.1'], migrate501To600), - migrateOrPass(['6.0.0'], migrate600To610) - ); -}; +import { migrateManifest } from '@/src/io/state-file/migrations'; function findRootUriAncestors( id: number, diff --git a/src/io/state-file/migrations.ts b/src/io/state-file/migrations.ts new file mode 100644 index 000000000..e51575c2c --- /dev/null +++ b/src/io/state-file/migrations.ts @@ -0,0 +1,264 @@ +import { pipe } from '@/src/utils/functional'; +import { makeDefaultSegmentGroupName } from '@/src/store/segmentGroups'; + +const LABELMAP_PALETTE_2_1_0 = { + '1': { + value: 1, + name: 'Segment 1', + color: [153, 153, 0, 255], + }, + '2': { + value: 2, + name: 'Segment 2', + color: [76, 76, 0, 255], + }, + '3': { + value: 3, + name: 'Segment 3', + color: [255, 255, 0, 255], + }, + '4': { + value: 4, + name: 'Segment 4', + color: [0, 76, 0, 255], + }, + '5': { + value: 5, + name: 'Segment 5', + color: [0, 153, 0, 255], + }, + '6': { + value: 6, + name: 'Segment 6', + color: [0, 255, 0, 255], + }, + '7': { + value: 7, + name: 'Segment 7', + color: [76, 0, 0, 255], + }, + '8': { + value: 8, + name: 'Segment 8', + color: [153, 0, 0, 255], + }, + '9': { + value: 9, + name: 'Segment 9', + color: [255, 0, 0, 255], + }, + '10': { + value: 10, + name: 'Segment 10', + color: [0, 76, 76, 255], + }, + '11': { + value: 11, + name: 'Segment 11', + color: [0, 153, 153, 255], + }, + '12': { + value: 12, + name: 'Segment 12', + color: [0, 255, 255, 255], + }, + '13': { + value: 13, + name: 'Segment 13', + color: [0, 0, 76, 255], + }, + '14': { + value: 14, + name: 'Segment 14', + color: [0, 0, 153, 255], + }, +}; + +const migrateOrPass = + (versions: Array, migrationFunc: (manifest: any) => any) => + (inputManifest: any) => { + if (versions.includes(inputManifest.version)) { + return migrationFunc(inputManifest); + } + return inputManifest; + }; + +const migrateBefore210 = (inputManifest: any) => { + const manifest = JSON.parse(JSON.stringify(inputManifest)); + manifest.version = '2.1.0'; + return manifest; +}; + +const migrate210To300 = (inputManifest: any) => { + const manifest = JSON.parse(JSON.stringify(inputManifest)); + manifest.tools.paint.activeSegmentGroupID = + inputManifest.tools.paint.activeLabelmapID; + delete manifest.tools.paint.activeLabelmapID; + + const order = Object.keys(LABELMAP_PALETTE_2_1_0).map((key) => Number(key)); + manifest.labelMaps = inputManifest.labelMaps.map( + (labelMap: any, index: number) => ({ + id: labelMap.id, + path: labelMap.path, + metadata: { + parentImage: labelMap.parent, + name: makeDefaultSegmentGroupName('My Image', index), + segments: { + order, + byValue: LABELMAP_PALETTE_2_1_0, 
+ }, + }, + }) + ); + + manifest.version = '3.0.0'; + return manifest; +}; + +const migrate501To600 = (inputManifest: any) => { + const manifest = JSON.parse(JSON.stringify(inputManifest)); + + if (manifest.views && Array.isArray(manifest.views)) { + manifest.viewByID = {}; + manifest.views.forEach((view: any) => { + const migratedView = { ...view }; + + if (!migratedView.name) { + migratedView.name = migratedView.id; + } + + if (migratedView.props) { + migratedView.options = {}; + Object.entries(migratedView.props).forEach(([key, value]) => { + if (typeof value === 'string') { + migratedView.options[key] = value; + } else { + migratedView.options[key] = JSON.stringify(value); + } + }); + delete migratedView.props; + } + + if (migratedView.type === '2D' && !migratedView.options) { + migratedView.options = {}; + } + if (migratedView.type === '2D') { + if (['Coronal', 'Sagittal', 'Axial'].includes(migratedView.id)) { + migratedView.options.orientation = migratedView.id; + } + } + + if (migratedView.type === 'Oblique3D') { + migratedView.type = 'Oblique'; + } + + const configKeys = Object.keys(migratedView.config || {}); + const primarySelection = manifest.primarySelection; + + migratedView.dataID = null; + if (configKeys.length > 0) { + migratedView.dataID = + primarySelection && configKeys.includes(primarySelection) + ? primarySelection + : configKeys[0]; + } + + manifest.viewByID[migratedView.id] = migratedView; + }); + delete manifest.views; + } + + if (manifest.isActiveViewMaximized === undefined) { + manifest.isActiveViewMaximized = false; + } + + if (manifest.activeView === undefined) { + manifest.activeView = null; + } + + if (manifest.layout && !manifest.layoutSlots) { + const slots: string[] = []; + + const convertLayoutItem = (item: any): any => { + if (typeof item === 'string') { + const slotIndex = slots.length; + slots.push(item); + return { + type: 'slot', + slotIndex, + }; + } + if (item.direction && item.items) { + return { + type: 'layout', + direction: item.direction, + items: item.items.map(convertLayoutItem), + }; + } + return item; + }; + + if (manifest.layout.direction && manifest.layout.items) { + manifest.layout = { + direction: manifest.layout.direction, + items: manifest.layout.items.map(convertLayoutItem), + }; + } + + manifest.layoutSlots = slots; + } + + if (!manifest.parentToLayers) { + manifest.parentToLayers = []; + } + + manifest.version = '6.0.0'; + return manifest; +}; + +const migrate600To610 = (inputManifest: any) => { + const manifest = JSON.parse(JSON.stringify(inputManifest)); + + const migrateDirection = (dir: 'H' | 'V'): 'row' | 'column' => { + return dir === 'H' ? 
'column' : 'row'; + }; + + const migrateLayout = (layout: any): any => { + if (!layout || typeof layout !== 'object') return layout; + + const migratedLayout = { ...layout }; + + if (layout.direction) { + migratedLayout.direction = migrateDirection(layout.direction); + } + + if (layout.items && Array.isArray(layout.items)) { + migratedLayout.items = layout.items.map((item: any) => { + if (item.type === 'layout') { + return migrateLayout(item); + } + return item; + }); + } + + return migratedLayout; + }; + + if (manifest.layout) { + manifest.layout = migrateLayout(manifest.layout); + } + + manifest.version = '6.1.0'; + return manifest; +}; + +export const migrateManifest = (manifestString: string) => { + const inputManifest = JSON.parse(manifestString); + return pipe( + inputManifest, + migrateOrPass(['1.1.0', '1.0.0', '0.5.0'], migrateBefore210), + migrateOrPass(['2.1.0'], migrate210To300), + migrateOrPass(['5.0.1'], migrate501To600), + migrateOrPass(['6.0.0'], migrate600To610) + ); +}; From 431373d9e37fc2938477256f059e52dbab07a54b Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 09:54:52 -0500 Subject: [PATCH 06/25] refactor: simplify state file data source restoration Replace complex rebuildDataSources() with simpler resolveToLeafSources() that walks the serialized DAG to find importable leaf sources (URIs or files) without reconstructing the full runtime DataSource tree. --- src/io/import/importDataSources.ts | 2 +- src/io/import/processors/restoreStateFile.ts | 237 ++++++------------- 2 files changed, 71 insertions(+), 168 deletions(-) diff --git a/src/io/import/importDataSources.ts b/src/io/import/importDataSources.ts index 66fa5e97e..842552de1 100644 --- a/src/io/import/importDataSources.ts +++ b/src/io/import/importDataSources.ts @@ -23,7 +23,7 @@ import handleAmazonS3 from '@/src/io/import/processors/handleAmazonS3'; import handleGoogleCloudStorage from '@/src/io/import/processors/handleGoogleCloudStorage'; import importSingleFile from '@/src/io/import/processors/importSingleFile'; import handleRemoteManifest from '@/src/io/import/processors/remoteManifest'; -import restoreStateFile from '@/src/io/import/processors/restoreStateFile'; +import { restoreStateFile } from '@/src/io/import/processors/restoreStateFile'; import updateFileMimeType from '@/src/io/import/processors/updateFileMimeType'; import handleConfig from '@/src/io/import/processors/handleConfig'; import { diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index b79517d2b..6d88fb78a 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -9,7 +9,6 @@ import { ImportHandler, StateFileContext, } from '@/src/io/import/common'; -import { DataSource } from '@/src/io/import/dataSource'; import { MANIFEST, isStateFile } from '@/src/io/state-file'; import { partition, getURLBasename } from '@/src/utils'; import { useSegmentGroupStore } from '@/src/store/segmentGroups'; @@ -22,213 +21,119 @@ import { useViewStore } from '@/src/store/views'; import { useViewConfigStore } from '@/src/store/view-configs'; import { migrateManifest } from '@/src/io/state-file/migrations'; -function findRootUriAncestors( +type LeafSource = + | { type: 'uri'; uri: string; name: string; mime?: string } + | { type: 'file'; file: File; fileType: string }; + +function resolveToLeafSources( id: number, - byId: Record -): DataSourceType[] { + byId: Record, + datasetFilePath: Record | undefined, + pathToFile: Record +): LeafSource[] { 
const src = byId[id]; if (!src) return []; - if (src.type === 'uri') return [src]; - if ('parent' in src && src.parent !== undefined) { - return findRootUriAncestors(src.parent, byId); - } - if (src.type === 'collection') { - const uris = new Map(); - src.sources.forEach((sourceId) => { - findRootUriAncestors(sourceId, byId).forEach((uri) => { - uris.set(uri.id, uri); - }); - }); - return [...uris.values()]; - } - return []; -} -function rebuildDataSources( - serializedDataSources: DataSourceType[], - fileIDToFile: Record -) { - const dataSourceCache: Record = {}; - const byId: Record = {}; - const leaves = new Set(); - - serializedDataSources.forEach((serializedSrc) => { - byId[serializedSrc.id] = serializedSrc; - leaves.add(serializedSrc.id); - }); - - const deserialize = ( - serialized: (typeof serializedDataSources)[number] - ): DataSource | null => { - const { type } = serialized; - switch (type) { - case 'file': - return { - type: 'file', - file: fileIDToFile[serialized.fileId], - fileType: serialized.fileType, - }; - case 'archive': { - const parent = dataSourceCache[serialized.parent]; - if (!parent) { - return null; - } - if (parent.type !== 'file') { - return null; - } - return { - type: 'archive', - path: serialized.path, - parent, - }; - } - case 'uri': { - const defaultName = getURLBasename(serialized.uri) || serialized.uri; - return { + switch (src.type) { + case 'uri': + return [ + { type: 'uri', - uri: serialized.uri, - name: serialized.name ?? defaultName, - mime: serialized.mime, - }; - } - case 'collection': { - serialized.sources.forEach((id) => { - leaves.delete(id); - }); - const sources = serialized.sources - .map((id) => dataSourceCache[id]) - .filter((src): src is DataSource => src != null); - if (sources.length === 0) { - return null; - } - return { - type: 'collection', - sources, - }; + uri: src.uri, + name: src.name ?? getURLBasename(src.uri) ?? src.uri, + mime: src.mime, + }, + ]; + + case 'file': { + const filePath = datasetFilePath?.[src.fileId]; + const file = filePath ? pathToFile[filePath] : undefined; + if (file) { + return [{ type: 'file', file, fileType: src.fileType }]; } - default: - throw new Error( - `Encountered an invalid serialized data source: ${type}` - ); - } - }; - - for (let i = 0; i < serializedDataSources.length; i++) { - const serializedSrc = serializedDataSources[i]; - - if (serializedSrc.id in dataSourceCache) { - continue; + console.warn( + `State file missing expected file: ${filePath ?? src.fileId}` + ); + return []; } - const dataSource = deserialize(serializedSrc); - - if (!dataSource) { - const rootUris = findRootUriAncestors(serializedSrc.id, byId); - leaves.delete(serializedSrc.id); - rootUris.forEach((uri) => leaves.add(uri.id)); - continue; - } + case 'archive': + return resolveToLeafSources( + src.parent, + byId, + datasetFilePath, + pathToFile + ); - if (serializedSrc.parent) { - dataSource.parent = dataSourceCache[serializedSrc.parent]; - leaves.delete(serializedSrc.parent); - } + case 'collection': + return src.sources.flatMap((sourceId) => + resolveToLeafSources(sourceId, byId, datasetFilePath, pathToFile) + ); - dataSourceCache[serializedSrc.id] = dataSource; + default: + return []; } - - return { dataSourceCache, leaves, byId }; } function prepareLeafDataSources(manifest: Manifest, datasetFiles: FileEntry[]) { - const { dataSources } = manifest; - const datasets = - manifest.datasets ?? 
- dataSources - .filter((ds) => ds.type === 'uri') - .map((ds) => ({ id: String(ds.id), dataSourceId: ds.id })); - const datasetFilePath = manifest.datasetFilePath ?? {}; - - const dataSourceIDToStateID = datasets.reduce>( - (acc, ds) => - Object.assign(acc, { - [ds.dataSourceId]: ds.id, - }), - {} - ); - const pathToFile = datasetFiles.reduce>( - (acc, datasetFile) => - Object.assign(acc, { - [datasetFile.archivePath]: datasetFile.file, - }), - {} - ); - const fileIDToFile = Object.entries(datasetFilePath).reduce< - Record - >( - (acc, [fileId, filePath]) => - Object.assign(acc, { - [fileId]: pathToFile[filePath], - }), - {} + const byId: Record = Object.fromEntries( + manifest.dataSources.map((ds) => [ds.id, ds]) ); - const { dataSourceCache, leaves, byId } = rebuildDataSources( - dataSources, - fileIDToFile + const pathToFile: Record = Object.fromEntries( + datasetFiles.map((f) => [f.archivePath, f.file]) ); - const leafDataSources = [...leaves] - .filter((leafId) => leafId in dataSourceCache) - .map((leafId) => { - const dataSource = dataSourceCache[leafId]; - - let stateID = dataSourceIDToStateID[leafId]; + const datasets = + manifest.datasets ?? + manifest.dataSources + .filter((ds) => ds.type === 'uri') + .map((ds) => ({ id: String(ds.id), dataSourceId: ds.id })); - if (!stateID) { - const matchingDataset = datasets.find((ds) => { - const rootUris = findRootUriAncestors(ds.dataSourceId, byId); - return rootUris.some((uri) => uri.id === leafId); - }); - if (matchingDataset) { - stateID = matchingDataset.id; - } - } + return datasets.flatMap((ds) => { + const sources = resolveToLeafSources( + ds.dataSourceId, + byId, + manifest.datasetFilePath, + pathToFile + ); - return { - ...dataSource, - stateFileLeaf: { stateID }, - }; + const seen = new Set(); + const uniqueSources = sources.filter((src) => { + if (src.type !== 'uri') return true; + if (seen.has(src.uri)) return false; + seen.add(src.uri); + return true; }); - return leafDataSources; + return uniqueSources.map((src) => ({ + ...src, + stateFileLeaf: { stateID: ds.id }, + })); + }); } async function completeStateFileRestore(ctx: StateFileContext) { const { manifest, stateFiles, stateIDToStoreID } = ctx; const stateIDToStoreIDRecord = Object.fromEntries(stateIDToStoreID); - // Restore view configs (handles missing configs gracefully) useViewConfigStore().deserializeAll(manifest, stateIDToStoreIDRecord); - // Restore the labelmaps const segmentGroupIDMap = await useSegmentGroupStore().deserialize( manifest, stateFiles, stateIDToStoreIDRecord ); + useLayersStore().deserialize(manifest, stateIDToStoreIDRecord); - // Restore the tools (each tool handles missing data gracefully) useToolStore().deserialize( manifest, segmentGroupIDMap, stateIDToStoreIDRecord ); - - useLayersStore().deserialize(manifest, stateIDToStoreIDRecord); } -const restoreStateFile: ImportHandler = async (dataSource, context) => { +export const restoreStateFile: ImportHandler = async (dataSource, context) => { if (dataSource.type === 'file' && (await isStateFile(dataSource.file))) { const stateFileContents = await extractFilesFromZip(dataSource.file); @@ -311,5 +216,3 @@ const restoreStateFile: ImportHandler = async (dataSource, context) => { } return Skip; }; - -export default restoreStateFile; From c9260568b4858d8c77aab4a08431604deef0835b Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 10:25:54 -0500 Subject: [PATCH 07/25] refactor: replace state file callbacks with result-based flow Replace StateFileContext callbacks with StateFileSetupResult 
type. State file restoration now collects results and completes at end of import instead of tracking pending counts with callbacks. --- src/io/import/common.ts | 25 ++---- src/io/import/importDataSources.ts | 69 +++++++------- src/io/import/processors/restoreStateFile.ts | 95 ++++++-------------- 3 files changed, 67 insertions(+), 122 deletions(-) diff --git a/src/io/import/common.ts b/src/io/import/common.ts index 5c2e6de2c..2d6cab5d6 100644 --- a/src/io/import/common.ts +++ b/src/io/import/common.ts @@ -38,6 +38,13 @@ export interface IntermediateResult { dataSources: DataSource[]; } +export interface StateFileSetupResult { + type: 'stateFileSetup'; + dataSources: DataSource[]; + manifest: Manifest; + stateFiles: FileEntry[]; +} + export interface ErrorResult { type: 'error'; error: Error; @@ -48,6 +55,7 @@ export type ImportResult = | LoadableResult | ConfigResult | IntermediateResult + | StateFileSetupResult | OkayResult | ErrorResult; @@ -101,31 +109,14 @@ export const asOkayResult = (dataSource: DataSource): OkayResult => ({ export type ArchiveContents = Record; export type ArchiveCache = Map>; -export interface StateFileContext { - manifest: Manifest; - stateFiles: FileEntry[]; - stateIDToStoreID: Map; - pendingLeafCount: number; - onLeafImported: (stateID: string, storeID: string) => void; - onAllLeavesImported: () => Promise; -} - export interface ImportContext { - // Caches URL responses fetchFileCache?: FetchCache; - // Caches archives. ArchiveFile -> { [archivePath]: File } archiveCache?: ArchiveCache; - // Records dicom files dicomDataSources?: DataSource[]; onCleanup?: (fn: () => void) => void; - /** - * A reference to importDataSources for nested imports. - */ importDataSources?: ( dataSources: DataSource[] ) => Promise; - // State file restoration context for 3-phase restoration - stateFileContext?: StateFileContext; } export type ImportHandler = ChainHandler< diff --git a/src/io/import/importDataSources.ts b/src/io/import/importDataSources.ts index 842552de1..53c7c96e3 100644 --- a/src/io/import/importDataSources.ts +++ b/src/io/import/importDataSources.ts @@ -10,12 +10,9 @@ import { ErrorResult, ImportDataSourcesResult, asIntermediateResult, + StateFileSetupResult, } from '@/src/io/import/common'; -import { - DataSource, - ChunkSource, - StateFileLeaf, -} from '@/src/io/import/dataSource'; +import { DataSource, ChunkSource } from '@/src/io/import/dataSource'; import handleDicomFile from '@/src/io/import/processors/handleDicomFile'; import extractArchive from '@/src/io/import/processors/extractArchive'; import extractArchiveTarget from '@/src/io/import/processors/extractArchiveTarget'; @@ -23,7 +20,10 @@ import handleAmazonS3 from '@/src/io/import/processors/handleAmazonS3'; import handleGoogleCloudStorage from '@/src/io/import/processors/handleGoogleCloudStorage'; import importSingleFile from '@/src/io/import/processors/importSingleFile'; import handleRemoteManifest from '@/src/io/import/processors/remoteManifest'; -import { restoreStateFile } from '@/src/io/import/processors/restoreStateFile'; +import { + restoreStateFile, + completeStateFileRestore, +} from '@/src/io/import/processors/restoreStateFile'; import updateFileMimeType from '@/src/io/import/processors/updateFileMimeType'; import handleConfig from '@/src/io/import/processors/handleConfig'; import { @@ -74,39 +74,18 @@ const applyConfigsPostState = ( } }); -function findStateFileLeaf(dataSource: DataSource): StateFileLeaf | undefined { +function findStateFileLeaf(dataSource: DataSource) { let current: DataSource 
| undefined = dataSource; while (current) { if (current.stateFileLeaf) return current.stateFileLeaf; current = current.parent; } - // For collections (DICOM volumes), check the first source's parent chain if (dataSource.type === 'collection' && dataSource.sources.length > 0) { return findStateFileLeaf(dataSource.sources[0]); } return undefined; } -async function handleStateFileResult( - result: LoadableResult, - importContext: ImportContext -) { - const stateLeaf = findStateFileLeaf(result.dataSource); - if (stateLeaf && importContext.stateFileContext) { - const ctx = importContext.stateFileContext; - ctx.stateIDToStoreID.set(stateLeaf.stateID, result.dataID); - - // Phase 2: Immediately bind view to this data so user sees streaming - ctx.onLeafImported(stateLeaf.stateID, result.dataID); - - ctx.pendingLeafCount--; - if (ctx.pendingLeafCount === 0) { - // Phase 3: Restore tools/segments after all data loaded - await ctx.onAllLeavesImported(); - } - } -} - async function importDicomChunkSources(sources: ChunkSource[]) { if (sources.length === 0) return []; @@ -180,6 +159,7 @@ export async function importDataSources( const chunkSources: DataSource[] = []; const configResults: ConfigResult[] = []; + const stateFileSetups: StateFileSetupResult[] = []; const results: ImportDataSourcesResult[] = []; let queue = dataSources.map((src) => ({ @@ -194,6 +174,9 @@ export async function importDataSources( queue = queue.filter((_, i) => i !== index); switch (result.type) { + case 'stateFileSetup': + stateFileSetups.push(result); + // fallthrough to handle dataSources case 'intermediate': { const [chunks, otherSources] = partition( (ds) => ds.type === 'chunk', @@ -201,7 +184,6 @@ export async function importDataSources( ); chunkSources.push(...chunks); - // try loading intermediate results queue.push( ...otherSources.map((src) => ({ promise: evaluateChain(src, handlers, importContext), @@ -220,11 +202,8 @@ export async function importDataSources( break; case 'ok': case 'error': - results.push(result); - break; case 'data': results.push(result); - await handleStateFileResult(result, importContext); break; default: throw new Error(`Invalid result: ${result}`); @@ -243,10 +222,6 @@ export async function importDataSources( try { const dicomResults = await importDicomChunkSources(dicomChunkSources); results.push(...dicomResults); - // Handle state file results for DICOM volumes - for (const dicomResult of dicomResults) { - await handleStateFileResult(dicomResult, importContext); - } } catch (err) { const errorSource = dicomChunkSources.length === 1 @@ -255,11 +230,27 @@ export async function importDataSources( results.push(asErrorResult(ensureError(err), errorSource)); } - // save data sources - useDatasetStore().addDataSources( - results.filter((result): result is LoadableResult => result.type === 'data') + const loadableResults = results.filter( + (r): r is LoadableResult => r.type === 'data' ); + useDatasetStore().addDataSources(loadableResults); + + for (const setup of stateFileSetups) { + const stateIDToStoreID: Record = {}; + for (const loadable of loadableResults) { + const leaf = findStateFileLeaf(loadable.dataSource); + if (leaf) { + stateIDToStoreID[leaf.stateID] = loadable.dataID; + } + } + await completeStateFileRestore( + setup.manifest, + setup.stateFiles, + stateIDToStoreID + ); + } + return results; } diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index 6d88fb78a..3d2e92e24 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ 
b/src/io/import/processors/restoreStateFile.ts @@ -5,9 +5,8 @@ import { } from '@/src/io/state-file/schema'; import { asErrorResult, - asIntermediateResult, ImportHandler, - StateFileContext, + StateFileSetupResult, } from '@/src/io/import/common'; import { MANIFEST, isStateFile } from '@/src/io/state-file'; import { partition, getURLBasename } from '@/src/utils'; @@ -113,27 +112,38 @@ function prepareLeafDataSources(manifest: Manifest, datasetFiles: FileEntry[]) { }); } -async function completeStateFileRestore(ctx: StateFileContext) { - const { manifest, stateFiles, stateIDToStoreID } = ctx; - const stateIDToStoreIDRecord = Object.fromEntries(stateIDToStoreID); +export async function completeStateFileRestore( + manifest: Manifest, + stateFiles: FileEntry[], + stateIDToStoreID: Record +) { + const viewStore = useViewStore(); - useViewConfigStore().deserializeAll(manifest, stateIDToStoreIDRecord); + Object.entries(stateIDToStoreID).forEach(([stateID, storeID]) => { + viewStore.bindViewsToData(stateID, storeID, manifest); + }); + + if (!manifest.viewByID) { + const firstStoreID = Object.values(stateIDToStoreID)[0]; + if (firstStoreID) { + viewStore.setDataForAllViews(firstStoreID); + } + } + + useViewConfigStore().deserializeAll(manifest, stateIDToStoreID); const segmentGroupIDMap = await useSegmentGroupStore().deserialize( manifest, stateFiles, - stateIDToStoreIDRecord + stateIDToStoreID ); - useLayersStore().deserialize(manifest, stateIDToStoreIDRecord); - useToolStore().deserialize( - manifest, - segmentGroupIDMap, - stateIDToStoreIDRecord - ); + useLayersStore().deserialize(manifest, stateIDToStoreID); + + useToolStore().deserialize(manifest, segmentGroupIDMap, stateIDToStoreID); } -export const restoreStateFile: ImportHandler = async (dataSource, context) => { +export const restoreStateFile: ImportHandler = async (dataSource) => { if (dataSource.type === 'file' && (await isStateFile(dataSource.file))) { const stateFileContents = await extractFilesFromZip(dataSource.file); @@ -158,61 +168,14 @@ export const restoreStateFile: ImportHandler = async (dataSource, context) => { ); } - // Phase 1: Set up view layout immediately (without data bindings) - const viewStore = useViewStore(); - viewStore.deserializeLayout(manifest); - - // Prepare leaf data sources with state file tags - const leafDataSources = prepareLeafDataSources(manifest, restOfStateFile); - - if (leafDataSources.length === 0) { - // No datasets to import, complete restoration immediately - await completeStateFileRestore({ - manifest, - stateFiles: restOfStateFile, - stateIDToStoreID: new Map(), - pendingLeafCount: 0, - onLeafImported: () => {}, - onAllLeavesImported: async () => {}, - }); - - // When viewByID is not in manifest, there's no data to assign - return asIntermediateResult([]); - } + useViewStore().deserializeLayout(manifest); - // Set up state file context for phase 2 and 3 callbacks - const stateFileContext: StateFileContext = { + return { + type: 'stateFileSetup', + dataSources: prepareLeafDataSources(manifest, restOfStateFile), manifest, stateFiles: restOfStateFile, - stateIDToStoreID: new Map(), - pendingLeafCount: leafDataSources.length, - onLeafImported: (stateID: string, storeID: string) => { - // Phase 2: Bind view to data as each leaf completes - viewStore.bindViewsToData(stateID, storeID, manifest); - }, - onAllLeavesImported: async () => { - // Phase 3: Restore segment groups, tools, layers after all data loaded - await completeStateFileRestore(stateFileContext); - - // When viewByID is not in manifest, 
assign first dataset to all views - if (!manifest.viewByID) { - const firstStoreID = stateFileContext.stateIDToStoreID - .values() - .next().value; - if (firstStoreID) { - viewStore.setDataForAllViews(firstStoreID); - } - } - }, - }; - - // Store context for use by main pipeline - if (context) { - context.stateFileContext = stateFileContext; - } - - // Return leaf data sources to be processed by main pipeline - return asIntermediateResult(leafDataSources); + } as StateFileSetupResult; } return Skip; }; From 5ace1a7a0d0b020b6cbf47c80b1453e61679b2dc Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 11:10:33 -0500 Subject: [PATCH 08/25] fix: prevent VTK crash when rendering images without scalar data DICOM SEG images don't pre-allocate vtkImageData like regular DICOM, so getVtkImageData() could return empty data causing volume renderer to crash on null scalar texture. --- src/io/import/importDataSources.ts | 3 +-- src/io/import/processors/restoreStateFile.ts | 7 +++++-- src/store/image-cache.ts | 8 +++++++- src/utils/urlParams.ts | 4 +--- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/io/import/importDataSources.ts b/src/io/import/importDataSources.ts index 53c7c96e3..a00c6ff18 100644 --- a/src/io/import/importDataSources.ts +++ b/src/io/import/importDataSources.ts @@ -1,7 +1,6 @@ import { ImportHandler, ImportResult, - ImportContext, asErrorResult, asLoadableResult, ConfigResult, @@ -122,7 +121,7 @@ export async function importDataSources( while (cleanupHandlers.length) cleanupHandlers.pop()!(); }; - const importContext: ImportContext = { + const importContext = { fetchFileCache: new Map(), onCleanup, importDataSources, diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index 3d2e92e24..f13ed1588 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -19,6 +19,7 @@ import { Skip } from '@/src/utils/evaluateChain'; import { useViewStore } from '@/src/store/views'; import { useViewConfigStore } from '@/src/store/view-configs'; import { migrateManifest } from '@/src/io/state-file/migrations'; +import { useMessageStore } from '@/src/store/messages'; type LeafSource = | { type: 'uri'; uri: string; name: string; mime?: string } @@ -50,8 +51,10 @@ function resolveToLeafSources( if (file) { return [{ type: 'file', file, fileType: src.fileType }]; } - console.warn( - `State file missing expected file: ${filePath ?? src.fileId}` + const missingFile = filePath ?? String(src.fileId); + useMessageStore().addError( + 'State file missing expected file', + missingFile ); return []; } diff --git a/src/store/image-cache.ts b/src/store/image-cache.ts index 01e99e9e8..829b24e12 100644 --- a/src/store/image-cache.ts +++ b/src/store/image-cache.ts @@ -26,7 +26,13 @@ export const useImageCacheStore = defineStore('image-cache', () => { function getVtkImageData(id: Maybe): Maybe { if (!id) return null; - return imageById[id]?.getVtkImageData() ?? null; + const image = imageById[id]; + if (!image) return null; + const data = image.getVtkImageData(); + // ProgressiveImage initializes with empty vtkImageData before actual data loads. + // VTK.js volume renderer crashes on empty data (null scalar texture). 
+ if (!data.getPointData().getScalars()?.getData()?.length) return null; + return data; } function getImageMetadata(id: Maybe): Maybe { diff --git a/src/utils/urlParams.ts b/src/utils/urlParams.ts index a09eb6965..b8b430c8b 100644 --- a/src/utils/urlParams.ts +++ b/src/utils/urlParams.ts @@ -10,9 +10,7 @@ type ParsedUrlParams = { const isValidUrl = (str: string) => { try { - // eslint-disable-next-line no-new - new URL(str.trim(), window.location.href); - return true; + return !!new URL(str.trim(), window.location.href); } catch { return false; } From a62c0f4d1d7d9b536ebfb770bcd7bbf3384ad5a0 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 12:24:03 -0500 Subject: [PATCH 09/25] chore(e2e): speed up tests --- .github/workflows/e2e.yml | 2 +- src/io/import/__tests__/configJson.spec.ts | 45 ++++++++++++++++++ src/io/import/configJson.ts | 4 +- ...ers.e2e.ts => layers-and-rendering.e2e.ts} | 30 +++++++++++- tests/specs/rendering-controls-3d-view.e2e.ts | 34 -------------- tests/specs/sample-rendering.e2e.ts | 23 ++++++---- wdio.shared.conf.ts | 46 +++++++++++++++++-- 7 files changed, 135 insertions(+), 49 deletions(-) create mode 100644 src/io/import/__tests__/configJson.spec.ts rename tests/specs/{layers.e2e.ts => layers-and-rendering.e2e.ts} (75%) delete mode 100644 tests/specs/rendering-controls-3d-view.e2e.ts diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 07b530e8d..a580e1895 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -10,7 +10,7 @@ jobs: name: E2E Testing on ${{ matrix.os }} runs-on: ${{ matrix.os }} env: - DOWNLOAD_TIMEOUT: 220000 + DOWNLOAD_TIMEOUT: 60000 VITE_SHOW_SAMPLE_DATA: true steps: - uses: actions/checkout@v4 diff --git a/src/io/import/__tests__/configJson.spec.ts b/src/io/import/__tests__/configJson.spec.ts new file mode 100644 index 000000000..5fc90a620 --- /dev/null +++ b/src/io/import/__tests__/configJson.spec.ts @@ -0,0 +1,45 @@ +import { describe, it, expect } from 'vitest'; +import { config } from '@/src/io/import/configJson'; + +describe('config schema', () => { + describe('shortcuts', () => { + it('should accept partial shortcuts', () => { + const result = config.safeParse({ + shortcuts: { + polygon: 'Ctrl+p', + rectangle: 'b', + }, + }); + + expect(result.success).to.be.true; + expect(result.data?.shortcuts).to.deep.equal({ + polygon: 'Ctrl+p', + rectangle: 'b', + }); + }); + + it('should reject invalid shortcut keys', () => { + const result = config.safeParse({ + shortcuts: { + invalidKey: 'Ctrl+x', + }, + }); + + expect(result.success).to.be.false; + }); + + it('should accept empty shortcuts', () => { + const result = config.safeParse({ + shortcuts: {}, + }); + + expect(result.success).to.be.true; + }); + + it('should accept config without shortcuts', () => { + const result = config.safeParse({}); + + expect(result.success).to.be.true; + }); + }); +}); diff --git a/src/io/import/configJson.ts b/src/io/import/configJson.ts index f4706aa83..4900573da 100644 --- a/src/io/import/configJson.ts +++ b/src/io/import/configJson.ts @@ -21,7 +21,9 @@ const layouts = z.record(z.string(), layoutConfig).optional(); // -------------------------------------------------------------------------- // Keyboard shortcuts -const shortcuts = z.record(zodEnumFromObjKeys(ACTIONS), z.string()).optional(); +const shortcuts = z + .partialRecord(zodEnumFromObjKeys(ACTIONS), z.string()) + .optional(); // -------------------------------------------------------------------------- // Labels diff --git 
a/tests/specs/layers.e2e.ts b/tests/specs/layers-and-rendering.e2e.ts similarity index 75% rename from tests/specs/layers.e2e.ts rename to tests/specs/layers-and-rendering.e2e.ts index 6c1fd4f9d..02d5d7bdc 100644 --- a/tests/specs/layers.e2e.ts +++ b/tests/specs/layers-and-rendering.e2e.ts @@ -3,7 +3,35 @@ import { volViewPage } from '../pageobjects/volview.page'; import { openUrls } from './utils'; import { PROSTATEX_DATASET, MRA_HEAD_NECK_DATASET } from './configTestUtils'; -describe('Add Layer button', () => { +describe('Layers and Rendering', () => { + it('should show 3D rendering controls regardless of active view', async () => { + await openUrls([PROSTATEX_DATASET]); + + await volViewPage.waitForViews(); + + const renderTab = volViewPage.renderingModuleTab; + await renderTab.click(); + + const view3D = await volViewPage.getView3D(); + + const volumeRenderingSection = + await volViewPage.getVolumeRenderingSection(); + await expect(volumeRenderingSection).toExist(); + await expect(volumeRenderingSection).toBeDisplayed(); + + const pwfCanvas = await $('div.pwf-editor canvas'); + await expect(pwfCanvas).toExist(); + + await view3D!.click(); + await expect(volumeRenderingSection).toBeDisplayed(); + + const view2D = await volViewPage.getView2D(); + await view2D!.click(); + + await expect(volumeRenderingSection).toBeDisplayed(); + await expect(pwfCanvas).toExist(); + }); + it('should create overlay with 2 DICOM images', async () => { await openUrls([PROSTATEX_DATASET, MRA_HEAD_NECK_DATASET]); diff --git a/tests/specs/rendering-controls-3d-view.e2e.ts b/tests/specs/rendering-controls-3d-view.e2e.ts deleted file mode 100644 index 2f15c5893..000000000 --- a/tests/specs/rendering-controls-3d-view.e2e.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { volViewPage } from '../pageobjects/volview.page'; -import { openUrls } from './utils'; -import { PROSTATEX_DATASET } from './configTestUtils'; - -describe('3D Rendering Controls', () => { - it('should show controls when 3D view exists regardless of active view', async () => { - await openUrls([PROSTATEX_DATASET]); - - await volViewPage.waitForViews(); - - const renderTab = volViewPage.renderingModuleTab; - await renderTab.click(); - - const view3D = await volViewPage.getView3D(); - - const volumeRenderingSection = - await volViewPage.getVolumeRenderingSection(); - await expect(volumeRenderingSection).toExist(); - await expect(volumeRenderingSection).toBeDisplayed(); - - const pwfCanvas = await $('div.pwf-editor canvas'); - await expect(pwfCanvas).toExist(); - - await view3D!.click(); - await expect(volumeRenderingSection).toBeDisplayed(); - - const view2D = await volViewPage.getView2D(); - await view2D!.click(); - - // Controls should still be displayed because 3D view exists (even if not active) - await expect(volumeRenderingSection).toBeDisplayed(); - await expect(pwfCanvas).toExist(); - }); -}); diff --git a/tests/specs/sample-rendering.e2e.ts b/tests/specs/sample-rendering.e2e.ts index afea042f6..fc11c6d4d 100644 --- a/tests/specs/sample-rendering.e2e.ts +++ b/tests/specs/sample-rendering.e2e.ts @@ -1,22 +1,29 @@ import AppPage from '../pageobjects/volview.page'; -// handle pixel jitter in 3D view -const THRESHOLD = 12; // percent +const THRESHOLD = 12; // percent - handle pixel jitter in 3D view +const RENDER_STABLE_TIMEOUT = 5000; describe('VolView', () => { it('should load and render a sample dataset', async () => { await AppPage.open(); await AppPage.downloadProstateSample(); await AppPage.waitForViews(); - await browser.pause(5000); const 
layoutContainer = await $('.layout-container'); - const result = await browser.checkElement( - layoutContainer, - 'prostate_sample_views' + await browser.waitUntil( + async () => { + const result = await browser.checkElement( + layoutContainer, + 'prostate_sample_views' + ); + return (result as number) < THRESHOLD; + }, + { + timeout: RENDER_STABLE_TIMEOUT, + interval: 500, + timeoutMsg: `Visual comparison exceeded ${THRESHOLD}% threshold after ${RENDER_STABLE_TIMEOUT}ms`, + } ); - - await expect(result).toBeLessThan(THRESHOLD); }); }); diff --git a/wdio.shared.conf.ts b/wdio.shared.conf.ts index ae285ad38..90f66c8f2 100644 --- a/wdio.shared.conf.ts +++ b/wdio.shared.conf.ts @@ -3,6 +3,33 @@ import * as fs from 'fs'; import type { Options, Capabilities } from '@wdio/types'; import { projectRoot } from './tests/e2eTestUtils'; +const TEST_DATASETS = [ + { + url: 'https://data.kitware.com/api/v1/file/6566aa81c5a2b36857ad1783/download', + name: 'CT000085.dcm', + }, + { + url: 'https://data.kitware.com/api/v1/file/68e9807dbf0f869935e36481/download', + name: 'minimal.dcm', + }, + { + url: 'https://data.kitware.com/api/v1/file/655d42a694ef39bf0a4a8bb3/download', + name: '1-001.dcm', + }, + { + url: 'https://data.kitware.com/api/v1/item/63527c7311dab8142820a338/download', + name: 'prostate.zip', + }, + { + url: 'https://data.kitware.com/api/v1/item/6352a2b311dab8142820a33b/download', + name: 'MRA-Head_and_Neck.zip', + }, + { + url: 'https://data.kitware.com/api/v1/item/635679c311dab8142820a4f4/download', + name: 'fetus.zip', + }, +]; + export const WINDOW_SIZE = [1200, 800] as const; export const TEST_PORT = 4567; // for slow connections try: @@ -39,8 +66,8 @@ export const config: Options.Testrunner = { // =================== logLevel: 'warn', bail: 0, - waitforTimeout: 30000, - connectionRetryTimeout: 120000, + waitforTimeout: 10000, + connectionRetryTimeout: 30000, connectionRetryCount: 3, services: [ [ @@ -72,15 +99,26 @@ export const config: Options.Testrunner = { reporters: ['spec', 'html-nice'], mochaOpts: { ui: 'bdd', - timeout: 160 * 1000, + timeout: 60000, }, // // Hooks // - onPrepare() { + async onPrepare() { fs.mkdirSync(TEMP_DIR, { recursive: true }); + + const downloads = TEST_DATASETS.map(async ({ url, name }) => { + const savePath = path.join(TEMP_DIR, name); + if (fs.existsSync(savePath)) { + return; + } + const response = await fetch(url); + const data = await response.arrayBuffer(); + fs.writeFileSync(savePath, Buffer.from(data)); + }); + await Promise.all(downloads); }, async before( From bb0e5f2f29c26d64c517f7d452c65ec12757e4b4 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Sun, 30 Nov 2025 17:37:32 -0500 Subject: [PATCH 10/25] test(e2e): add sparse manifest test for prostate rectangle with lesion label --- .../sparse-manifest-prostate-rectangle.e2e.ts | 95 +++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 tests/specs/sparse-manifest-prostate-rectangle.e2e.ts diff --git a/tests/specs/sparse-manifest-prostate-rectangle.e2e.ts b/tests/specs/sparse-manifest-prostate-rectangle.e2e.ts new file mode 100644 index 000000000..5d712f6b1 --- /dev/null +++ b/tests/specs/sparse-manifest-prostate-rectangle.e2e.ts @@ -0,0 +1,95 @@ +import { PROSTATEX_DATASET } from './configTestUtils'; +import { downloadFile, openVolViewPage, writeManifestToZip } from './utils'; + +describe('Sparse manifest with prostate rectangle', () => { + it('loads prostate dataset with lesion rectangle annotation', async () => { + await downloadFile(PROSTATEX_DATASET.url, 
PROSTATEX_DATASET.name); + + const sparseManifest = { + version: '6.1.0', + dataSources: [ + { + id: 0, + type: 'uri', + uri: `/tmp/${PROSTATEX_DATASET.name}`, + }, + ], + tools: { + rectangles: { + tools: [ + { + imageID: '0', + frameOfReference: { + planeNormal: [ + 1.4080733262381892e-17, 0.24192188680171967, + 0.9702957272529602, + ], + planeOrigin: [ + -117.91325380387, -75.35208187384475, 52.136969503946816, + ], + }, + slice: 9, + firstPoint: [ + -65.36087045590452, -15.919061788109012, 37.31865385684797, + ], + secondPoint: [ + 22.78165684736155, 47.65944636974224, 21.46675198654735, + ], + label: 'lesion', + }, + ], + labels: { + lesion: { + labelName: 'lesion', + color: 'red', + strokeWidth: 1, + fillColor: 'transparent', + }, + }, + }, + }, + }; + + const fileName = 'sparse-prostate-lesion-rectangle.volview.zip'; + await writeManifestToZip(sparseManifest, fileName); + await openVolViewPage(fileName); + + const annotationsTab = await $( + 'button[data-testid="module-tab-Annotations"]' + ); + await annotationsTab.click(); + + const measurementsTab = await $('button.v-tab*=Measurements'); + await measurementsTab.waitForClickable(); + await measurementsTab.click(); + + await browser.waitUntil( + async () => { + const rectangleEntries = await $$( + '.v-list-item i.mdi-vector-square.tool-icon' + ); + const count = await rectangleEntries.length; + return count >= 1; + }, + { + timeout: 10000, + timeoutMsg: 'Rectangle tool not found in measurements list', + } + ); + + await browser.waitUntil( + async () => { + const listItemTitles = await $$('.v-list-item .v-list-item-title'); + for (const title of listItemTitles) { + const text = await title.getText(); + if (text.includes('lesion')) return true; + } + return false; + }, + { + timeout: 5000, + timeoutMsg: 'Lesion label not found on rectangle annotation', + } + ); + }); +}); From 9613f056f9043e7dd14535caa0403b5564cfadf6 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Mon, 1 Dec 2025 09:54:02 -0500 Subject: [PATCH 11/25] fix(state): bump manifest version to 6.1.1 for sparse schema support --- src/io/state-file/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/io/state-file/index.ts b/src/io/state-file/index.ts index 0d32b105f..2f94423fd 100644 --- a/src/io/state-file/index.ts +++ b/src/io/state-file/index.ts @@ -12,7 +12,7 @@ import { ARCHIVE_FILE_TYPES } from '@/src/io/mimeTypes'; import { useViewConfigStore } from '@/src/store/view-configs'; export const MANIFEST = 'manifest.json'; -export const MANIFEST_VERSION = '6.1.0'; +export const MANIFEST_VERSION = '6.1.1'; export async function serialize() { const datasetStore = useDatasetStore(); From 9d455cfde0b57e248c8eea1e964f5c31275ce11a Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Mon, 1 Dec 2025 16:42:20 -0500 Subject: [PATCH 12/25] feat(state): support standalone JSON state files without zip wrapper State files can now be loaded as plain JSON files, not just zipped. This enables simpler workflows when data is referenced via URIs. 
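
For illustration only, a standalone session file can be as small as a version plus one uri data source. The sketch below mirrors the writeManifestToFile helper used by the e2e specs; the file name and URI are placeholders, not part of this change:

    // Minimal sketch, assuming a reachable DICOM URI; every other manifest
    // field is optional and falls back to defaults when the state is restored.
    import * as fs from 'fs';

    const sparseManifest = {
      version: '6.1.0',
      dataSources: [{ id: 0, type: 'uri', uri: 'https://example.com/CT000085.dcm' }],
    };

    // Plain JSON on disk -- no zip wrapper needed for restoration.
    fs.writeFileSync(
      'session.volview.json',
      JSON.stringify(sparseManifest, null, 2)
    );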
--- src/io/import/processors/restoreStateFile.ts | 40 +++++++++++++------- src/io/state-file/index.ts | 16 +++++++- tests/specs/sparse-manifest.e2e.ts | 26 ++++++++++++- 3 files changed, 66 insertions(+), 16 deletions(-) diff --git a/src/io/import/processors/restoreStateFile.ts b/src/io/import/processors/restoreStateFile.ts index f13ed1588..64bbbc185 100644 --- a/src/io/import/processors/restoreStateFile.ts +++ b/src/io/import/processors/restoreStateFile.ts @@ -146,20 +146,34 @@ export async function completeStateFileRestore( useToolStore().deserialize(manifest, segmentGroupIDMap, stateIDToStoreID); } -export const restoreStateFile: ImportHandler = async (dataSource) => { - if (dataSource.type === 'file' && (await isStateFile(dataSource.file))) { - const stateFileContents = await extractFilesFromZip(dataSource.file); +async function parseManifestFromZip(file: File) { + const stateFileContents = await extractFilesFromZip(file); - const [manifests, restOfStateFile] = partition( - (dataFile) => dataFile.file.name === MANIFEST, - stateFileContents - ); + const [manifests, restOfStateFile] = partition( + (dataFile) => dataFile.file.name === MANIFEST, + stateFileContents + ); - if (manifests.length !== 1) { - throw new Error('State file does not have exactly 1 manifest'); - } + if (manifests.length !== 1) { + throw new Error('State file does not have exactly 1 manifest'); + } + + const manifestString = await manifests[0].file.text(); + return { manifestString, stateFiles: restOfStateFile }; +} + +async function parseManifestFromJson(file: File) { + const manifestString = await file.text(); + return { manifestString, stateFiles: [] as FileEntry[] }; +} + +export const restoreStateFile: ImportHandler = async (dataSource) => { + if (dataSource.type === 'file' && (await isStateFile(dataSource.file))) { + const isJson = dataSource.fileType === 'application/json'; + const { manifestString, stateFiles } = isJson + ? 
await parseManifestFromJson(dataSource.file) + : await parseManifestFromZip(dataSource.file); - const manifestString = await manifests[0].file.text(); const migrated = migrateManifest(manifestString); let manifest: Manifest; try { @@ -175,9 +189,9 @@ export const restoreStateFile: ImportHandler = async (dataSource) => { return { type: 'stateFileSetup', - dataSources: prepareLeafDataSources(manifest, restOfStateFile), + dataSources: prepareLeafDataSources(manifest, stateFiles), manifest, - stateFiles: restOfStateFile, + stateFiles, } as StateFileSetupResult; } return Skip; diff --git a/src/io/state-file/index.ts b/src/io/state-file/index.ts index 2f94423fd..34feb4b25 100644 --- a/src/io/state-file/index.ts +++ b/src/io/state-file/index.ts @@ -5,10 +5,11 @@ import { useLayersStore } from '@/src/store/datasets-layers'; import { useToolStore } from '@/src/store/tools'; import { Tools } from '@/src/store/tools/types'; import { useViewStore } from '@/src/store/views'; -import { Manifest } from '@/src/io/state-file/schema'; +import { Manifest, ManifestSchema } from '@/src/io/state-file/schema'; import { retypeFile } from '@/src/io'; import { ARCHIVE_FILE_TYPES } from '@/src/io/mimeTypes'; +import { migrateManifest } from '@/src/io/state-file/migrations'; import { useViewConfigStore } from '@/src/store/view-configs'; export const MANIFEST = 'manifest.json'; @@ -70,11 +71,22 @@ export async function serialize() { export async function isStateFile(file: File) { const typedFile = await retypeFile(file); + if (ARCHIVE_FILE_TYPES.has(typedFile.type)) { const zip = await JSZip.loadAsync(typedFile); - return zip.file(MANIFEST) !== null; } + if (typedFile.type === 'application/json') { + try { + const text = await file.text(); + const migrated = migrateManifest(text); + ManifestSchema.parse(migrated); + return true; + } catch { + return false; + } + } + return false; } diff --git a/tests/specs/sparse-manifest.e2e.ts b/tests/specs/sparse-manifest.e2e.ts index 3c15c4580..c28b53bcc 100644 --- a/tests/specs/sparse-manifest.e2e.ts +++ b/tests/specs/sparse-manifest.e2e.ts @@ -1,5 +1,10 @@ import { MINIMAL_DICOM } from './configTestUtils'; -import { downloadFile, openVolViewPage, writeManifestToZip } from './utils'; +import { + downloadFile, + openVolViewPage, + writeManifestToFile, + writeManifestToZip, +} from './utils'; describe('Sparse manifest.json', () => { it('loads manifest with only URL data source', async () => { @@ -85,4 +90,23 @@ describe('Sparse manifest.json', () => { } ); }); + + it('loads standalone JSON state file (not zipped)', async () => { + await downloadFile(MINIMAL_DICOM.url, MINIMAL_DICOM.name); + + const sparseManifest = { + version: '6.1.0', + dataSources: [ + { + id: 0, + type: 'uri', + uri: `/tmp/${MINIMAL_DICOM.name}`, + }, + ], + }; + + const fileName = 'standalone-state.volview.json'; + await writeManifestToFile(sparseManifest, fileName); + await openVolViewPage(fileName); + }); }); From 97f32b00704c41890780e21ec2307161b313b1eb Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Mon, 1 Dec 2025 17:03:25 -0500 Subject: [PATCH 13/25] fix(paint): use labelmap's coordinate transform Paint strokes now convert world coordinates to index space using the labelmap's worldToIndex matrix instead of the parent image's. This fixes painting at wrong locations when segment groups have different direction matrices than their parent image. 
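
As a hedged sketch of the conversion this change moves into the paint store (names are illustrative; the actual code in the diff below reads the matrix from the active labelmap):

    // Illustration only: transform a world-space brush point into the
    // labelmap's own index space with gl-matrix, as paint.ts now does.
    import { vec3, mat4 } from 'gl-matrix';

    const worldToLabelmapIndex = (
      worldPoint: vec3,
      labelmapWorldToIndex: mat4
    ): vec3 => {
      const indexPoint = vec3.create();
      vec3.transformMat4(indexPoint, worldPoint, labelmapWorldToIndex);
      return indexPoint;
    };

Using the labelmap's matrix rather than the parent image's is what keeps strokes aligned when the two volumes carry different direction matrices.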
--- src/components/tools/paint/PaintWidget2D.vue | 19 +++++------ src/store/tools/paint.ts | 33 ++++++++++++++------ 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/src/components/tools/paint/PaintWidget2D.vue b/src/components/tools/paint/PaintWidget2D.vue index 4f7ae225b..1dbe8c6cd 100644 --- a/src/components/tools/paint/PaintWidget2D.vue +++ b/src/components/tools/paint/PaintWidget2D.vue @@ -57,11 +57,8 @@ export default defineComponent({ () => imageMetadata.value.lpsOrientation[viewAxis.value] ); - const worldPointToIndex = (worldPoint: vec3) => { - const { worldToIndex } = imageMetadata.value; - const indexPoint = vec3.create(); - vec3.transformMat4(indexPoint, worldPoint, worldToIndex); - return indexPoint; + const cloneWorldPoint = (worldPoint: vec3) => { + return vec3.clone(worldPoint); }; const widget = view.widgetManager.addWidget( @@ -95,17 +92,17 @@ export default defineComponent({ if (!imageId.value) return; paintStore.setSliceAxis(viewAxisIndex.value, imageId.value); const origin = widgetState.getBrush().getOrigin()!; - const indexPoint = worldPointToIndex(origin); - paintStore.startStroke(indexPoint, viewAxisIndex.value, imageId.value); + const worldPoint = cloneWorldPoint(origin); + paintStore.startStroke(worldPoint, viewAxisIndex.value, imageId.value); paintStore.updatePaintPosition(origin, viewId.value); }); onVTKEvent(widget, 'onInteractionEvent', () => { if (!imageId.value) return; const origin = widgetState.getBrush().getOrigin()!; - const indexPoint = worldPointToIndex(origin); + const worldPoint = cloneWorldPoint(origin); paintStore.placeStrokePoint( - indexPoint, + worldPoint, viewAxisIndex.value, imageId.value ); @@ -114,8 +111,8 @@ export default defineComponent({ onVTKEvent(widget, 'onEndInteractionEvent', () => { if (!imageId.value) return; - const indexPoint = worldPointToIndex(widgetState.getBrush().getOrigin()!); - paintStore.endStroke(indexPoint, viewAxisIndex.value, imageId.value); + const worldPoint = cloneWorldPoint(widgetState.getBrush().getOrigin()!); + paintStore.endStroke(worldPoint, viewAxisIndex.value, imageId.value); }); // --- manipulator --- // diff --git a/src/store/tools/paint.ts b/src/store/tools/paint.ts index c8258333c..6e4495199 100644 --- a/src/store/tools/paint.ts +++ b/src/store/tools/paint.ts @@ -196,14 +196,27 @@ export const usePaintToolStore = defineStore('paint', () => { const lastIndex = strokePoints.value.length - 1; if (lastIndex >= 0) { - const lastPoint = strokePoints.value[lastIndex]; - const prevPoint = + const labelmapWorldToIndex = labelmap.getWorldToIndex(); + const worldToLabelmapIndex = (worldPoint: vec3) => { + const indexPoint = vec3.create(); + vec3.transformMat4(indexPoint, worldPoint, labelmapWorldToIndex); + return indexPoint; + }; + + const lastWorldPoint = strokePoints.value[lastIndex]; + const prevWorldPoint = lastIndex >= 1 ? strokePoints.value[lastIndex - 1] : undefined; + + const lastIndexPoint = worldToLabelmapIndex(lastWorldPoint); + const prevIndexPoint = prevWorldPoint + ? 
worldToLabelmapIndex(prevWorldPoint) + : undefined; + this.$paint.paintLabelmap( labelmap, axisIndex, - lastPoint, - prevPoint, + lastIndexPoint, + prevIndexPoint, shouldPaint ); } @@ -250,32 +263,32 @@ export const usePaintToolStore = defineStore('paint', () => { function startStroke( this: _This, - indexPoint: vec3, + worldPoint: vec3, axisIndex: 0 | 1 | 2, imageID: string ) { switchToSegmentGroupForImage.call(this, imageID); - strokePoints.value = [vec3.clone(indexPoint)]; + strokePoints.value = [vec3.clone(worldPoint)]; doPaintStroke.call(this, axisIndex, imageID); } function placeStrokePoint( this: _This, - indexPoint: vec3, + worldPoint: vec3, axisIndex: 0 | 1 | 2, imageID: string ) { - strokePoints.value.push(indexPoint); + strokePoints.value.push(worldPoint); doPaintStroke.call(this, axisIndex, imageID); } function endStroke( this: _This, - indexPoint: vec3, + worldPoint: vec3, axisIndex: 0 | 1 | 2, imageID: string ) { - strokePoints.value.push(indexPoint); + strokePoints.value.push(worldPoint); doPaintStroke.call(this, axisIndex, imageID); } From d8b28d763fc05bc22d2c8c1b6254f1d4044cd8a2 Mon Sep 17 00:00:00 2001 From: Paul Elliott Date: Tue, 2 Dec 2025 11:36:57 -0500 Subject: [PATCH 14/25] fix(segmentation): handle labelmaps with different orientations Add coordinate transformation utilities and fix tools to use labelmap's own coordinate system instead of assuming it matches the parent image. Fixes paint, polygon rasterization, slice rendering, and scalar probe when segment groups have different direction matrices than the base image. --- src/components/tools/ScalarProbe.vue | 37 ++++++++-- src/components/tools/paint/PaintWidget2D.vue | 73 +++++++++++-------- src/components/tools/polygon/PolygonTool.vue | 33 +++++++-- .../VtkSegmentationSliceRepresentation.vue | 35 ++++++++- src/store/tools/paint.ts | 12 +-- src/utils/imageSpace.ts | 49 ++++++++++++- 6 files changed, 179 insertions(+), 60 deletions(-) diff --git a/src/components/tools/ScalarProbe.vue b/src/components/tools/ScalarProbe.vue index 32c9d585a..aa4e0be40 100644 --- a/src/components/tools/ScalarProbe.vue +++ b/src/components/tools/ScalarProbe.vue @@ -1,7 +1,9 @@