From 767632ee446295062e56c4939c7bdaf9b95cdd3c Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Thu, 21 Aug 2025 15:40:30 -0400
Subject: [PATCH 01/21] Use the current channel and timepoint in the
 viewIdToPath loop

---
 .../net/preibisch/bigstitcher/spark/CreateFusionContainer.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
index 710c60a7..3cf76da7 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
@@ -462,7 +462,7 @@ else if ( storageType == StorageFormat.ZARR )
 			for ( int t = 0; t < numTimepoints; ++t )
 			{
 				final OMEZARREntry omeZarrEntry = new OMEZARREntry(
-						mrInfos[ 0 ][ 0 ].dataset.substring(0, mrInfos[ 0 ][ 0 ].dataset.lastIndexOf( "/" ) ),
+						mrInfos[ t ][ c ].dataset.substring(0, mrInfos[ t ][ c ].dataset.lastIndexOf( "/" ) ),
 						new int[] { c, t } );

 				viewIdToPath.put( new ViewId( t, c ), omeZarrEntry );

From 4374240f1536f0946465157703c03867852963d6 Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Thu, 21 Aug 2025 15:56:55 -0400
Subject: [PATCH 02/21] Restored group arg changes

---
 .../spark/CreateFusionContainer.java          | 68 ++++++++++++-------
 .../bigstitcher/spark/SparkAffineFusion.java  | 43 ++++++++----
 2 files changed, 72 insertions(+), 39 deletions(-)

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
index 0e19e614..da2021c3 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
@@ -116,8 +116,23 @@ public static enum Compressions { Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw };
 	@Option(names = { "--anisotropyFactor" }, description = "define the anisotropy factor if preserveAnisotropy is set to true (default: compute from data)")
 	private double anisotropyFactor = Double.NaN;

+	@Option(names = { "--group" }, description = "Container group path")
+	private String groupPath = "";
+
 	URI outPathURI = null, xmlOutURI = null;

+	/**
+	 * @return container group path, always terminated with a '/'
+	 */
+	private String getContainerGroupPath()
+	{
+		if (!groupPath.endsWith("/")) {
+			return groupPath + "/";
+		} else {
+			return groupPath;
+		}
+	}
+
 	@Override
 	public Void call() throws Exception
 	{
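For orientation between the hunks above and below: the normalization in getContainerGroupPath() is what lets the root and non-root cases be handled uniformly. A minimal standalone sketch (the wrapper class and main method are illustration only; the conditional is copied from the patch):

	public class GroupPathExample
	{
		private String groupPath = ""; // mirrors the --group option default

		// Normalize the group so attribute/dataset paths can be built by concatenation.
		private String getContainerGroupPath()
		{
			if (!groupPath.endsWith("/")) {
				return groupPath + "/";
			} else {
				return groupPath;
			}
		}

		public static void main(String[] args)
		{
			GroupPathExample e = new GroupPathExample();
			System.out.println(e.getContainerGroupPath()); // "" -> "/" (container root)
			e.groupPath = "fused";
			System.out.println(e.getContainerGroupPath()); // "fused" -> "fused/"
			e.groupPath = "fused/";
			System.out.println(e.getContainerGroupPath()); // already normalized: stays "fused/"
		}
	}

With the default empty --group the helper returns "/", which is why the hunk below only calls createGroup() when getContainerGroupPath() is not "/".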
@@ -299,24 +314,26 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
 			return null;
 		}

-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/InputXML", xmlURI );
+		if ( ! getContainerGroupPath().equals("/") ) driverVolumeWriter.createGroup( getContainerGroupPath() );
+
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", xmlURI );

-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumTimepoints", numTimepoints );
-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumChannels", numChannels );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", numTimepoints );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", numChannels );

-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );

-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );

 		if (preserveAnisotropy) // cannot write Double.NaN into JSON
-			driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/DataType", dt );
-		driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/BlockSize", blockSize );
+			driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", dt );
+		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", blockSize );

 		if ( minIntensity != null && maxIntensity != null )
 		{
-			driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MinIntensity", minIntensity );
-			driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MaxIntensity", maxIntensity );
+			driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", minIntensity );
+			driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", maxIntensity );
 		}

 		// setup datasets and metadata
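Aside on the hunk above: all fusion metadata is now namespaced under the container group instead of the root. A minimal sketch of the round trip between CreateFusionContainer and SparkAffineFusion, assuming a local OME-ZARR container at file:/tmp/fused.zarr (the path, the group name "fused" and the value 2 are made up; the N5 calls are the ones used in this patch):

	// write side (CreateFusionContainer invoked with --group fused)
	final N5Writer writer = URITools.instantiateN5Writer( StorageFormat.ZARR, URI.create( "file:/tmp/fused.zarr" ) );
	final String group = "fused/"; // what getContainerGroupPath() returns for --group fused
	writer.createGroup( group );
	writer.setAttribute( group, "Bigstitcher-Spark/NumChannels", 2 );

	// read side (SparkAffineFusion, invoked with the same --group)
	final int numChannels = writer.getAttribute( group, "Bigstitcher-Spark/NumChannels", int.class );
	writer.close();

Both tools must be given the same --group; otherwise SparkAffineFusion looks in the wrong group, finds no FusionFormat attribute, and aborts.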
" ); if ( !bdv ) - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "OME-ZARR" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "OME-ZARR" ); final long[] dim3d = boundingBox.dimensionsAsLongArray(); @@ -343,7 +360,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) for ( int d = 0; d < ds.length; ++d ) ds[ d ] = new int[] { downsamplings[ d ][ 0 ], downsamplings[ d ][ 1 ], downsamplings[ d ][ 2 ], 1, 1 }; - final Function levelToName = (level) -> "/" + level; + final Function levelToName = (level) -> getContainerGroupPath() + level; mrInfos = new MultiResolutionLevelInfo[ 1 ][]; @@ -370,14 +387,15 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); + final Function levelRelativeToGroup = (level) -> "/" + level; // create metadata final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata( 5, // int n - "/", // String name, I also saw "/" + getContainerGroupPath(), // String name, I also saw "/" resolutionS0, // double[] resolutionS0, "micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer mrInfos[ 0 ].length, // int numResolutionLevels, - levelToName, + levelRelativeToGroup, levelToMipmapTransform ); // save metadata @@ -385,7 +403,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) //org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata // for this to work you need to register an adapter in the N5Factory class // final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() ); - driverVolumeWriter.setAttribute( "/", "multiscales", meta ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "multiscales", meta ); } if ( bdv ) @@ -393,13 +411,13 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) System.out.println( "Creating BDV compatible container at '" + outPathURI + "' ... 
" ); if ( storageType == StorageFormat.N5 ) - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" ); else if ( storageType == StorageFormat.ZARR ) - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" ); else - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" ); - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/OutputXML", xmlOutURI ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/OutputXML", xmlOutURI ); final long[] bb = boundingBox.dimensionsAsLongArray(); @@ -476,7 +494,7 @@ else if ( storageType == StorageFormat.ZARR ) myMrInfo[ c + t*c ] = N5ApiTools.setupBdvDatasetsN5( driverVolumeWriter, vd, dt, bb, compression, blockSize, downsamplings); - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" ); } else // HDF5 { @@ -492,9 +510,9 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 ) mrInfos = new MultiResolutionLevelInfo[ numChannels * numTimepoints ][]; if ( storageType == StorageFormat.N5 ) - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "N5" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "N5" ); else - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "HDF5" ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "HDF5" ); for ( int c = 0; c < numChannels; ++c ) for ( int t = 0; t < numTimepoints; ++t ) @@ -516,7 +534,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 ) } // TODO: set extra attributes to load the state - driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", mrInfos ); + driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", mrInfos ); driverVolumeWriter.close(); @@ -535,4 +553,4 @@ public static void main(final String... args) throws SpimDataException System.exit(new CommandLine(new CreateFusionContainer()).execute(args)); } -} \ No newline at end of file +} diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index f84e0210..faf50500 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -163,6 +163,9 @@ public enum DataTypeFusion @CommandLine.Option(names = { "--intensityN5Dataset" }, description = "dataset name for each coefficient dataset (default: \"intensity\"). 
The coefficients for view(s,t) are stored in dataset \"{-n5Group}/setup{s}/timepoint{t}/{n5Dataset}\"") private String intensityN5Dataset = "intensity"; + @Option(names = { "--group" }, description = "Container group path") + private String groupPath = ""; + URI outPathURI = null; /** * Prefetching now works with a Executors.newCachedThreadPool(); @@ -171,6 +174,18 @@ public enum DataTypeFusion URI intensityN5PathURI = null; + /** + * @return container group path always terminated with a '/' + */ + private String getContainerGroupPath() + { + if (!groupPath.endsWith("/")) { + return groupPath + "/"; + } else { + return groupPath; + } + } + @Override public Void call() throws Exception { @@ -240,7 +255,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt final N5Writer driverVolumeWriter = N5Util.createN5Writer( outPathURI, storageType ); - final String fusionFormat = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/FusionFormat", String.class ); + final String fusionFormat = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", String.class ); if ( fusionFormat == null ) { @@ -250,14 +265,14 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt } final boolean bdv = fusionFormat.toLowerCase().contains( "BDV" ); - final URI xmlURI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/InputXML", URI.class ); + final URI xmlURI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", URI.class ); final int numTimepoints, numChannels; if ( timepointIndex == null ) { - numTimepoints = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumTimepoints", int.class ); - numChannels = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumChannels", int.class ); + numTimepoints = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", int.class ); + numChannels = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", int.class ); } else { @@ -265,16 +280,16 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt numTimepoints = numChannels = 1; } - final long[] bbMin = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", long[].class ); - final long[] bbMax = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", long[].class ); - + final long[] bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class ); + final long[] bbMax = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", long[].class ); + final BoundingBox boundingBox = new BoundingBox( new FinalInterval( bbMin, bbMax ) ); - final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", boolean.class ); - final double anisotropyFactor = preserveAnisotropy ? driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN; - final int[] blockSize = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/BlockSize", int[].class ); + final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", boolean.class ); + final double anisotropyFactor = preserveAnisotropy ? 
driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN; + final int[] blockSize = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", int[].class ); - final DataType dataType = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/DataType", DataType.class ); + final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class ); System.out.println( "FusionFormat: " + fusionFormat ); System.out.println( "Input XML: " + xmlURI ); @@ -290,8 +305,8 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt double minI = Double.NaN, maxI = Double.NaN; try { - minI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MinIntensity", double.class ); - maxI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MaxIntensity", double.class ); + minI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", double.class ); + maxI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", double.class ); } catch ( Exception e ) { @@ -305,7 +320,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt System.out.println( "maxIntensity: " + maxI ); final MultiResolutionLevelInfo[][] mrInfos = - driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class ); + driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class ); System.out.println( "Loaded " + mrInfos.length + " metadata object for fused " + storageType + " volume(s)" ); From 0d7cf0eeb5bc7c4e78cdca3469b380574ad158b1 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Thu, 21 Aug 2025 16:12:54 -0400 Subject: [PATCH 03/21] added some comments --- .../bigstitcher/spark/CreateFusionContainer.java | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java index da2021c3..1a390a12 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java @@ -314,6 +314,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) return null; } + // if there is a group different from the root, create it if ( ! 
getContainerGroupPath().equals("/") ) driverVolumeWriter.createGroup( getContainerGroupPath() );

 		driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", xmlURI );
@@ -360,14 +361,12 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
 			for ( int d = 0; d < ds.length; ++d )
 				ds[ d ] = new int[] { downsamplings[ d ][ 0 ], downsamplings[ d ][ 1 ], downsamplings[ d ][ 2 ], 1, 1 };

-			final Function<Integer, String> levelToName = (level) -> getContainerGroupPath() + level;
-
 			mrInfos = new MultiResolutionLevelInfo[ 1 ][];

 			// all is 5d now
 			mrInfos[ 0 ] = N5ApiTools.setupMultiResolutionPyramid(
 					driverVolumeWriter,
-					levelToName,
+					(level) -> getContainerGroupPath() + level, // multiscale pyramid will be created for the entire provided group
 					dt,
 					dim, //5d
 					compression,
@@ -387,7 +386,6 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
 			System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() );

-			final Function<Integer, String> levelRelativeToGroup = (level) -> "/" + level;
 			// create metadata
 			final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
 					5, // int n
 					getContainerGroupPath(), // String name, I also saw "/"
 					resolutionS0, // double[] resolutionS0,
 					"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
 					mrInfos[ 0 ].length, // int numResolutionLevels,
-					levelRelativeToGroup,
+					(level) -> "/" + level, // OME-ZARR metadata will be created relative to the provided group
 					levelToMipmapTransform );

From a3ed0ea4ac1bf033e26d3dc821ab55aae5fcafb4 Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Sun, 14 Sep 2025 21:24:44 -0400
Subject: [PATCH 04/21] factored out Compressions into its own class file

---
 .../bigstitcher/spark/Compressions.java       |  3 ++
 .../spark/CreateFusionContainer.java          |  8 ----
 .../bigstitcher/spark/SparkResaveN5.java      | 44 ++++---------------
 .../bigstitcher/spark/util/N5Util.java        |  2 +-
 4 files changed, 12 insertions(+), 45 deletions(-)
 create mode 100644 src/main/java/net/preibisch/bigstitcher/spark/Compressions.java

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
new file mode 100644
index 00000000..8c9b596e
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
@@ -0,0 +1,3 @@
+package net.preibisch.bigstitcher.spark;
+
+public enum Compressions {Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw}

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
index 1a390a12..e53fbe9e 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
@@ -59,8 +59,6 @@ public class CreateFusionContainer extends AbstractBasic implements Callable(I)V from class com.google.common.collect.Streams
-	at com.google.common.collect.Streams.concat(Streams.java:204)
-	at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.TransformationUtils.tranformsToAffine(TransformationUtils.java:27)
-	at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.buildMetadata(OmeNgffMultiScaleMetadata.java:159)
-	at 
org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:101) - at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:91) - at net.preibisch.mvrecon.fiji.spimdata.imgloaders.OMEZarrAttibutes.createOMEZarrMetadata(OMEZarrAttibutes.java:128) - at net.preibisch.mvrecon.process.n5api.N5ApiTools.setupBdvDatasetsOMEZARR(N5ApiTools.java:422) - at net.preibisch.bigstitcher.spark.SparkResaveN5.lambda$call$1(SparkResaveN5.java:219) - */ - - /* - local: - com.google.common.collect.ImmutableList: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar - com.google.common.collect.Streams: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar - */ - - /* - cluster: - com.google.common.collect.ImmutableList: file:/misc/local/spark-3.4.1/jars/guava-14.0.1.jar - com.google.common.collect.Streams: file:/groups/scicompsoft/home/preibischs/Keller/BigStitcher-Spark-0.1.0-SNAPSHOT.jar - */ - System.out.println( "com.google.common.collect.ImmutableList: " + com.google.common.collect.ImmutableList.class.getProtectionDomain().getCodeSource().getLocation() ); System.out.println( "com.google.common.collect.Streams: " + com.google.common.collect.Streams.class.getProtectionDomain().getCodeSource().getLocation() ); - //System.exit( 0 ); final SpimData2 dataGlobal = this.loadSpimData2(); @@ -174,7 +147,6 @@ public Void call() throws Exception blockSize[1] * blockScale[ 1 ], blockSize[2] * blockScale[ 2 ] }; - //final N5Writer n5 = new N5FSWriter(n5Path); final N5Writer n5Writer = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI ); System.out.println( "Compression: " + this.compression ); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java index 7d5b52c4..a93a81f1 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java @@ -36,7 +36,7 @@ import org.janelia.saalfeldlab.n5.universe.StorageFormat; import org.janelia.scicomp.n5.zstandard.ZstandardCompression; -import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions; +import net.preibisch.bigstitcher.spark.Compressions; import net.preibisch.legacy.io.IOFunctions; import util.URITools; From cdf29fbbf04816254d350962010c32d8f6352233 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Sun, 14 Sep 2025 21:46:40 -0400 Subject: [PATCH 05/21] removed commented code --- .../java/net/preibisch/bigstitcher/spark/SparkResaveN5.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java index f801b904..fdbf0377 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java @@ -239,8 +239,6 @@ public Void call() throws Exception if ( localSparkBindAddress ) conf.set("spark.driver.bindAddress", "127.0.0.1"); - //System.exit( 0 ); - final JavaSparkContext sc = new JavaSparkContext(conf); sc.setLogLevel("ERROR"); From 96a0ed152c3b4b1e9c7d10fd79219ede424e529c Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Sun, 14 Sep 2025 22:19:35 -0400 Subject: [PATCH 06/21] create the dataset.xml for the original czi and resave it as OME-ZARR --- 
 install                                       | 1 +
 .../spark/CreateStitchingDataset.java         | 394 ++++++++++++++++
 .../mvrecon/dataset/DatasetBuilder.java       | 437 ++++++++++++++++++
 3 files changed, 832 insertions(+)
 create mode 100644 src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java
 create mode 100644 src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java

diff --git a/install b/install
index 76be2afd..1052b165 100755
--- a/install
+++ b/install
@@ -127,6 +127,7 @@ install_command solver "net.preibisch.bigstitcher.spark.Solver"
 install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
 install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
 install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
+install_command create-stitching-dataset "net.preibisch.bigstitcher.spark.CreateStitchingDataset"

 echo 'Installing utils ...'

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java
new file mode 100644
index 00000000..2faa2054
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java
@@ -0,0 +1,394 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.File;
+import java.io.Serializable;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.stream.Collectors;
+
+import bdv.img.n5.N5ImageLoader;
+import mpicbg.spim.data.SpimDataException;
+import mpicbg.spim.data.sequence.ViewDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import net.imglib2.util.Util;
+import net.imglib2.util.ValuePair;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure;
+import net.preibisch.bigstitcher.spark.util.Import;
+import net.preibisch.bigstitcher.spark.util.N5Util;
+import net.preibisch.bigstitcher.spark.util.RetryTrackerSpark;
+import net.preibisch.bigstitcher.spark.util.Spark;
+import net.preibisch.mvrecon.dataset.DatasetBuilder;
+import net.preibisch.mvrecon.fiji.plugin.resave.ParametersResaveN5Api;
+import net.preibisch.mvrecon.fiji.plugin.resave.Resave_HDF5;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader;
+import net.preibisch.mvrecon.process.n5api.N5ApiTools;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.bigdataviewer.n5.N5CloudImageLoader;
+import org.janelia.saalfeldlab.n5.Compression;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+import picocli.CommandLine;
+import picocli.CommandLine.Option;
+import util.URITools;
+
+public class CreateStitchingDataset extends AbstractInfrastructure implements Callable<Void>, Serializable {
+    private static final long serialVersionUID = -9140450542904228386L;
+
+    @Option(names = {"-o", "--outputPath"}, required = true, description = "OME-ZARR/N5/HDF5 path for saving, e.g. -o /home/fused.zarr, file:/home/fused.n5 or e.g. 
s3://myBucket/data.zarr") + private String outputPathURIString = null; + + @Option(names = {"-s", "--storage"}, defaultValue = "ZARR", showDefaultValue = CommandLine.Help.Visibility.ALWAYS, + description = "Dataset storage type, currently supported OME-ZARR, N5, and ONLY for local, multithreaded Spark HDF5 (default: OME-ZARR)") + private StorageFormat storageType = null; + + @Option(names = {"--intermediate-xml"}, description = "name of the intermediate BigDataViewer xml project") + private String tmpXmlOut = null; + + @Option(names = {"-xo", "--xmlout"}, description = "path to the output xml, e.g. file:/data/dataset.xml or s3://myBucket/data/dataset.xml") + private String xmlOutURIString = null; + + @Option(names = {"--input-path"}, required = true, description = "Path to the input images, e.g. /data/images/") + private String inputPath = "/Users/goinac/Work/HHMI/stitching/datasets/tiny_4_bigstitcher/t1/"; + + @Option(names = {"--input-pattern"}, description = "Glob pattern for input images, e.g. /data/images/*.tif") + private String inputPattern = "*"; + + @Option(names = {"--output-container"}, description = "Output container") + private String outputContainer; + + @Option(names = "--blockSize", description = "blockSize, you can use smaller blocks for HDF5 (default: 128,128,64)") + private String blockSizeString = "128,128,64"; + + @Option(names = "--blockScale", description = "how many blocks to use for a single processing step, e.g. 4,4,1 means for blockSize a 128,128,32 that each spark thread writes 512,512,32 (default: 16,16,1)") + private String blockScaleString = "16,16,1"; + + @Option(names = {"-ds", "--downsampling"}, description = "downsampling pyramid (must contain full res 1,1,1 that is always created), e.g. 1,1,1; 2,2,1; 4,4,1; 8,8,2 (default: automatically computed)") + private String downsampling = null; + + @Option(names = {"-c", "--compression"}, defaultValue = "Zstandard", showDefaultValue = CommandLine.Help.Visibility.ALWAYS, + description = "Dataset compression") + private Compressions compression = null; + + @Option(names = {"-cl", "--compressionLevel"}, description = "compression level, if supported by the codec (default: gzip 1, Zstandard 3, xz 6)") + private Integer compressionLevel = null; + + @Override + public Void call() throws Exception { + this.setRegion(); + + URI outPathURI = URITools.toURI(outputPathURIString + "/"); + SpimData2 spimData = createDataset(); + + URI xmlOutURI = xmlOutURIString != null ? 
URITools.toURI(xmlOutURIString) : outPathURI.resolve("dataset.xml");
+
+        System.out.println("Save spimData with original tiles to " + xmlOutURI);
+        new XmlIoSpimData2().save(spimData, xmlOutURI);
+
+        if (outputContainer != null) {
+            if (tmpXmlOut != null) {
+                URI tmpXmlLocation = outPathURI.resolve(tmpXmlOut);
+                System.out.println("Save intermediate spimData to " + tmpXmlLocation);
+                new XmlIoSpimData2().save(spimData, tmpXmlLocation);
+            }
+            URI outputContainerURI = outPathURI.resolve(outputContainer);
+            System.out.println("Re-Save data to " + outputContainerURI);
+            saveDatasetAsN5(spimData, xmlOutURI, outputContainerURI);
+        }
+        return null;
+    }
+
+    private SpimData2 createDataset() {
+        DatasetBuilder datasetBuilder = new DatasetBuilder(inputPattern);
+        return datasetBuilder.createDataset(inputPath);
+    }
+
+    private void saveDatasetAsN5(SpimData2 spimData, URI outputXMLURI, URI outputContainerURI) {
+        List<ViewId> viewIds = Import.getViewIds(spimData);
+        if (viewIds.isEmpty()) {
+            throw new IllegalArgumentException("No views could be generated.");
+        }
+
+        Collections.sort(viewIds);
+
+        final Compression dataCompression = N5Util.getCompression(this.compression, this.compressionLevel);
+
+        final int[] blockSize = Import.csvStringToIntArray(blockSizeString);
+        final int[] blockScale = Import.csvStringToIntArray(blockScaleString);
+
+        final int[] computeBlockSize = new int[]{
+                blockSize[0] * blockScale[0],
+                blockSize[1] * blockScale[1],
+                blockSize[2] * blockScale[2]};
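+
+        // Sketch of the block arithmetic above, using the option defaults:
+        // blockSize 128,128,64 and blockScale 16,16,1 give a computeBlockSize of
+        // 2048,2048,64, i.e. each Spark task writes 16*16*1 = 256 storage blocks
+        // in one pass (fewer, larger tasks at the cost of more memory per task).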
"default" : compressionLevel)); + System.out.println("N5 block size=" + Util.printCoordinates(blockSize)); + System.out.println("Compute block size=" + Util.printCoordinates(computeBlockSize)); + System.out.println("Setting up XML at: " + outputXMLURI); + System.out.println("Setting up N5 writing to basepath: " + outputContainerURI); + + // all ViewSetupIds (needed to create N5 datasets) + final HashMap dimensions = + N5ApiTools.assembleDimensions(spimData, viewIds); + + // all grids across all ViewId's + final List gridS0 = + viewIds.stream().map(viewId -> + N5ApiTools.assembleJobs( + viewId, + dimensions.get(viewId.getViewSetupId()), + blockSize, + computeBlockSize)).flatMap(List::stream).collect(Collectors.toList()); + + final Map dataTypes = + N5ApiTools.assembleDataTypes(spimData, dimensions.keySet()); + + // estimate or read downsampling factors + final int[][] downsamplings; + + if (this.downsampling == null) + downsamplings = N5ApiTools.mipMapInfoToDownsamplings( + Resave_HDF5.proposeMipmaps(N5ApiTools.assembleViewSetups(spimData, viewIds)) + ); + else + downsamplings = Import.csvStringToDownsampling(this.downsampling); + + if (!Import.testFirstDownsamplingIsPresent(downsamplings)) + throw new IllegalStateException("First downsampling step must be full resolution [1,1,...1], stopping."); + + System.out.println("Downsamplings: " + Arrays.deepToString(downsamplings)); + + // create all datasets and write BDV metadata for all ViewIds (including downsampling) in parallel + long time = System.currentTimeMillis(); + + final Map viewIdToMrInfo = + viewIds.parallelStream().map(viewId -> { + final N5ApiTools.MultiResolutionLevelInfo[] mrInfo; + + if (storageType == StorageFormat.ZARR) { + mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR( + n5Writer, + viewId, + dataTypes.get(viewId.getViewSetupId()), + dimensions.get(viewId.getViewSetupId()), + dataCompression, + blockSize, + downsamplings); + } else { + System.out.println(Arrays.toString(blockSize)); + mrInfo = N5ApiTools.setupBdvDatasetsN5( + n5Writer, + viewId, + dataTypes.get(viewId.getViewSetupId()), + dimensions.get(viewId.getViewSetupId()), + dataCompression, + blockSize, + downsamplings); + } + + return new ValuePair<>( + new ViewId( viewId.getTimePointId(), viewId.getViewSetupId() ), // viewId is actually a ViewDescripton object, thus not serializable + mrInfo ); + }).collect(Collectors.toMap(e -> e.getA(), e -> e.getB())); + + System.out.println( "Created BDV-metadata, took " + (System.currentTimeMillis() - time ) + " ms." ); + System.out.println( "Number of compute blocks = " + gridS0.size() ); + + final SparkConf conf = new SparkConf().setAppName("SparkSaveDataset"); + + if ( localSparkBindAddress ) + conf.set("spark.driver.bindAddress", "127.0.0.1"); + + final JavaSparkContext sc = new JavaSparkContext(conf); + sc.setLogLevel("ERROR"); + + // + // Save s0 level + // + time = System.currentTimeMillis(); + final RetryTrackerSpark retryTracker = + RetryTrackerSpark.forGridBlocks("s0 n5-api dataset resaving", gridS0.size()); + + do { + if (!retryTracker.beginAttempt()) { + System.out.println( "Stopping." 
+
+        do {
+            if (!retryTracker.beginAttempt()) {
+                System.out.println( "Stopping." );
+                System.exit( 1 );
+            }
+
+            final JavaRDD<long[][]> rdds0 = sc.parallelize( gridS0, Math.min( Spark.maxPartitions, gridS0.size() ) );
+
+            final JavaRDD<long[][]> rdds0Result = rdds0.map( gridBlock -> {
+                final SpimData2 dataLocal = Spark.getSparkJobSpimData2(outputXMLURI);
+                final N5Writer n5Lcl = URITools.instantiateN5Writer( storageType, outputContainerURI );
+
+                N5ApiTools.resaveS0Block(
+                        dataLocal,
+                        n5Lcl,
+                        storageType,
+                        dataTypes.get( N5ApiTools.gridBlockToViewId( gridBlock ).getViewSetupId() ),
+                        N5ApiTools.gridToDatasetBdv( 0, storageType ), // a function mapping the gridblock to the dataset name for level 0 and N5
+                        gridBlock );
+
+                n5Lcl.close();
+
+                return gridBlock.clone();
+            });
+
+            rdds0Result.cache();
+            rdds0Result.count();
+
+            // extract all blocks that failed
+            final Set<long[][]> failedBlocksSet =
+                    retryTracker.processWithSpark( rdds0Result, gridS0 );
+
+            // Use RetryTracker to handle retry counting and removal
+            if (!retryTracker.processFailures(failedBlocksSet)) {
+                System.out.println( "Stopping." );
+                System.exit( 1 );
+            }
+
+            // Update grid for next iteration with remaining failed blocks
+            gridS0.clear();
+            gridS0.addAll(failedBlocksSet);
+        }
+        while ( gridS0.size() > 0 );
+
+        System.out.println( "Saved " + (storageType == StorageFormat.ZARR ? "OME-ZARR 0" : "N5 s0") + "-level, took: " + (System.currentTimeMillis() - time ) + " ms." );
+
+        //
+        // Save remaining downsampling levels (s1 ... sN)
+        //
+        for ( int level = 1; level < downsamplings.length; ++level ) {
+            final int s = level;
+
+            //mrInfo.dimensions, mrInfo.blockSize, mrInfo.blockSize
+            final List<long[][]> allBlocks =
+                    viewIds.stream().map( viewId ->
+                            N5ApiTools.assembleJobs(
+                                    viewId,
+                                    viewIdToMrInfo.get(viewId)[s] )).flatMap(List::stream).collect( Collectors.toList() );
+
+            System.out.println( "Downsampling level " + (storageType == StorageFormat.N5 ? "s" : "") + s + "... " );
+            System.out.println( "Number of compute blocks: " + allBlocks.size() );
+
+            final RetryTrackerSpark retryTrackerDS =
+                    RetryTrackerSpark.forGridBlocks( "s" + s +" n5-api dataset resaving", allBlocks.size());
+
+            final long timeS = System.currentTimeMillis();
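+
+            // Each level s below is computed from level s-1, not from s0: the map
+            // step reads viewIdToMrInfo[...][ s - 1 ] and writes [ s ]. E.g. with
+            // downsampling steps 1,1,1; 2,2,1; 4,4,1 a 2048x2048x64 region at s0
+            // shrinks to 1024x1024x64 at s1 and then 512x512x64 at s2.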
+
+            do
+            {
+                if (!retryTrackerDS.beginAttempt())
+                {
+                    System.out.println( "Stopping." );
+                    System.exit( 1 );
+                }
+
+                final JavaRDD<long[][]> rddsN = sc.parallelize(allBlocks, Math.min( Spark.maxPartitions, allBlocks.size() ) );
+
+                final JavaRDD<long[][]> rdds0Result = rddsN.map( gridBlock ->
+                {
+                    final N5Writer n5Lcl = URITools.instantiateN5Writer( storageType, outputContainerURI );
+
+                    if ( storageType == StorageFormat.ZARR ) {
+                        N5ApiTools.writeDownsampledBlock5dOMEZARR(
+                                n5Lcl,
+                                viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ),
+                                viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ),
+                                gridBlock,
+                                0,
+                                0 );
+
+                    } else {
+                        N5ApiTools.writeDownsampledBlock(
+                                n5Lcl,
+                                viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ),
+                                viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ),
+                                gridBlock );
+                    }
+
+                    n5Lcl.close();
+
+                    return gridBlock.clone();
+                });
+
+                rdds0Result.cache();
+                rdds0Result.count();
+
+                // extract all blocks that failed
+                final Set<long[][]> failedBlocksSet =
+                        retryTrackerDS.processWithSpark( rdds0Result, allBlocks );
+
+                // Use RetryTracker to handle retry counting and removal
+                if (!retryTrackerDS.processFailures(failedBlocksSet))
+                {
+                    System.out.println( "Stopping." );
+                    System.exit( 1 );
+                }
+
+                // Update grid for next iteration with remaining failed blocks
+                allBlocks.clear();
+                allBlocks.addAll(failedBlocksSet);
+
+            } while ( allBlocks.size() > 0 );
+
+            System.out.println( "Saved as " + (storageType == StorageFormat.N5 ? "N5 s" : "OME-ZARR ") + s + " level, took: " + (System.currentTimeMillis() - timeS ) + " ms." );
+        }
+
+        sc.close();
+
+        System.out.println( "Saved the entire dataset successfully." );
+
+        System.out.println( "Saving new xml to: " + outputXMLURI );
+
+        if (storageType == StorageFormat.N5) {
+            if (URITools.isFile(outputContainerURI)) {
+                spimData.getSequenceDescription().setImgLoader(
+                        new N5ImageLoader( outputContainerURI, spimData.getSequenceDescription()));
+            } else {
+                spimData.getSequenceDescription().setImgLoader(
+                        new N5CloudImageLoader( null, outputContainerURI, spimData.getSequenceDescription())); // null is OK because the instance is not used now
+            }
+        } else {
+            final Map< ViewId, AllenOMEZarrLoader.OMEZARREntry> viewIdToPath = new HashMap<>();
+
+            viewIdToMrInfo.forEach( (viewId, mrInfo ) ->
+                    viewIdToPath.put(
+                            viewId,
+                            new AllenOMEZarrLoader.OMEZARREntry( mrInfo[ 0 ].dataset.substring(0, mrInfo[ 0 ].dataset.lastIndexOf( "/" ) ), new int[] { 0, 0 } ) )
+            );
+            spimData.getSequenceDescription().setImgLoader(
+                    new AllenOMEZarrLoader( outputContainerURI, spimData.getSequenceDescription(), viewIdToPath ));
+        }
+        new XmlIoSpimData2().save( spimData, outputXMLURI );
+
+        n5Writer.close();
+
+        System.out.println( "Resaved project, in total took: " + (System.currentTimeMillis() - time ) + " ms." );
+        System.out.println( "done." );
+    }
+
+    public static void main(final String... 
args) throws SpimDataException { + System.out.println(Arrays.toString(args)); + + System.exit(new CommandLine(new CreateStitchingDataset()).execute(args)); + } +} diff --git a/src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java new file mode 100644 index 00000000..15306277 --- /dev/null +++ b/src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java @@ -0,0 +1,437 @@ +package net.preibisch.mvrecon.dataset; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.PathMatcher; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import loci.formats.ChannelSeparator; +import loci.formats.IFormatReader; +import loci.formats.meta.MetadataRetrieve; +import mpicbg.spim.data.generic.sequence.BasicViewDescription; +import mpicbg.spim.data.registration.ViewRegistrations; +import mpicbg.spim.data.sequence.Angle; +import mpicbg.spim.data.sequence.Channel; +import mpicbg.spim.data.sequence.FinalVoxelDimensions; +import mpicbg.spim.data.sequence.Illumination; +import mpicbg.spim.data.sequence.ImgLoader; +import mpicbg.spim.data.sequence.SequenceDescription; +import mpicbg.spim.data.sequence.Tile; +import mpicbg.spim.data.sequence.TimePoint; +import mpicbg.spim.data.sequence.TimePoints; +import mpicbg.spim.data.sequence.ViewDescription; +import mpicbg.spim.data.sequence.ViewSetup; +import mpicbg.spim.data.sequence.VoxelDimensions; +import net.imglib2.Dimensions; +import net.imglib2.FinalDimensions; +import net.preibisch.mvrecon.fiji.datasetmanager.DatasetCreationUtils; +import net.preibisch.mvrecon.fiji.spimdata.SpimData2; +import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes; +import net.preibisch.mvrecon.fiji.spimdata.imgloaders.FileMapImgLoaderLOCI; +import net.preibisch.mvrecon.fiji.spimdata.imgloaders.LegacyStackImgLoaderLOCI; +import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapEntry; +import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapImgLoaderLOCI2; +import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints; +import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions; +import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults; +import ome.units.UNITS; +import ome.units.quantity.Length; +import org.apache.commons.lang3.builder.ToStringBuilder; +import scala.Tuple2; + +public class DatasetBuilder { + + static class ViewIndex { + final int tp, ch, il, ang; + + ViewIndex(int tp, int ch, int il, int ang) { + this.tp = tp; + this.ch = ch; + this.il = il; + this.ang = ang; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + ViewIndex tileIndex = (ViewIndex) o; + return tp == tileIndex.tp && ch == tileIndex.ch && il == tileIndex.il && ang == tileIndex.ang; + } + + @Override + public int hashCode() { + return Objects.hash(tp, ch, il, ang); + } + + @Override + public String toString() { + return new ToStringBuilder(this) + 
.append("tp", tp) + .append("ch", ch) + .append("il", il) + .append("ang", ang) + .toString(); + } + } + + static class TileInfo { + final int tileIndex; // global tile (view) index + final Path filePath; // file containing this tile + final int tp; + final int tileName; + final int imageIndex; + final int chIndex; + final String chName; + final int angle; + final int illumination; + final int sizeZ; + final int sizeY; + final int sizeX; + final double z; + final double y; + final double x; + final double resZ; + final double resY; + final double resX; + + TileInfo(int tileIndex, + Path filePath, + int tp, + int tileName, + int imageIndex, + int chIndex, String chName, + int angle, int illumination, + int sizeZ, int sizeY, int sizeX, + double z, double y, double x, + double resZ, double resY, double resX) { + this.tileIndex = tileIndex; + this.filePath = filePath; + this.tp = tp; + this.tileName = tileName; + this.imageIndex = imageIndex; + this.chIndex = chIndex; + this.chName = chName; + this.angle = angle; + this.illumination = illumination; + this.sizeZ = sizeZ; + this.sizeY = sizeY; + this.sizeX = sizeX; + this.z = z; + this.y = y; + this.x = x; + this.resZ = resZ; + this.resY = resY; + this.resX = resX; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + TileInfo tileInfo = (TileInfo) o; + return tp == tileInfo.tp && imageIndex == tileInfo.imageIndex && tileIndex == tileInfo.tileIndex && chIndex == tileInfo.chIndex && angle == tileInfo.angle && illumination == tileInfo.illumination && Objects.equals(filePath, tileInfo.filePath) && Objects.equals(chName, tileInfo.chName); + } + + @Override + public int hashCode() { + return Objects.hash(filePath, tp, imageIndex, tileIndex, chIndex, chName, angle, illumination); + } + } + + static class StackFile { + final ViewIndex viewIndex; + final int ti; + final Path file; + int nImages = -1; + int nTp = -1; + int nCh = -1; + int sizeZ = -1; + int sizeY = -1; + int sizeX = -1; + + StackFile(int tp, int ch, int il, int ang, int ti, Path file ) + { + this.viewIndex = new ViewIndex(tp, ch, il, ang); + this.ti = ti; + this.file = file; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + StackFile stackFile = (StackFile) o; + return ti == stackFile.ti && Objects.equals(viewIndex, stackFile.viewIndex) && Objects.equals(file, stackFile.file); + } + + @Override + public int hashCode() { + return Objects.hash(viewIndex, ti, file); + } + + @Override + public String toString() { + return new ToStringBuilder(this) + .append("view", viewIndex) + .append("ti", ti) + .append("file", file) + .toString(); + } + + List loadTileMetadata() + { + List tiles = new ArrayList<>(); + if ( !file.toFile().exists() ) + { + return tiles; + } + + IFormatReader formatReader = new ChannelSeparator(); + try { + if ( !LegacyStackImgLoaderLOCI.createOMEXMLMetadata( formatReader ) ) { + try { + formatReader.close(); + } catch (IOException e) { + e.printStackTrace(); + } + return tiles; + } + + formatReader.setId( file.toString() ); + + MetadataRetrieve retrieve = (MetadataRetrieve)formatReader.getMetadataStore(); + + this.nImages = retrieve.getImageCount(); + this.nTp = formatReader.getSizeT(); + this.nCh = formatReader.getSizeC(); + this.sizeZ = formatReader.getSizeZ(); + this.sizeY = formatReader.getSizeY(); + this.sizeX = formatReader.getSizeX(); + for (int imageIndex = 0; imageIndex < nImages; imageIndex++) { + Length z = 
+                for (int imageIndex = 0; imageIndex < nImages; imageIndex++) {
+                    Length z = retrieve.getPlanePositionZ(imageIndex, 0);
+                    Length y = retrieve.getPlanePositionY(imageIndex, 0);
+                    Length x = retrieve.getPlanePositionX(imageIndex, 0);
+                    Length resX = retrieve.getPixelsPhysicalSizeX(imageIndex);
+                    Length resY = retrieve.getPixelsPhysicalSizeY(imageIndex);
+                    Length resZ = retrieve.getPixelsPhysicalSizeZ(imageIndex);
+                    double zz = z != null ? z.value(UNITS.MICROMETER).doubleValue() : 0;
+                    double yy = y != null ? y.value(UNITS.MICROMETER).doubleValue() : 0;
+                    double xx = x != null ? x.value(UNITS.MICROMETER).doubleValue() : 0;
+                    double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0;
+                    double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0;
+                    double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 0;
+                    int imageChannels = retrieve.getChannelCount(imageIndex);
+                    for (int chIndex = 0; chIndex < imageChannels; chIndex++) {
+                        String chName = retrieve.getChannelName(imageIndex, chIndex);
+                        tiles.add(new TileInfo(
+                                (ti * nImages + imageIndex) * imageChannels + chIndex,
+                                file,
+                                viewIndex.tp,
+                                ti,
+                                imageIndex,
+                                chIndex, chName,
+                                viewIndex.ang, viewIndex.il,
+                                sizeZ, sizeY, sizeX,
+                                zz, yy, xx,
+                                rZ, rY, rX));
+                    }
+                }
+                return tiles;
+            } catch (Exception e) {
+                throw new IllegalStateException("Could not read " + file, e);
+            }
+        }
+    }
+
+    static class StackPattern {
+        final String sourcePattern;
+        final String globPattern;
+        final Pattern regexPattern;
+        final Set<String> keys;
+
+        StackPattern( String sourcePattern ) {
+            this.sourcePattern = sourcePattern;
+            this.globPattern = sourcePattern
+                    .replaceAll("\\{t\\}", "*")
+                    .replaceAll("\\{c\\}", "*")
+                    .replaceAll("\\{i\\}", "*")
+                    .replaceAll("\\{a\\}", "*")
+                    .replaceAll("\\{x\\}", "*");
+            this.regexPattern = Pattern.compile( sourcePattern
+                    .replaceAll("\\.", "\\\\.") // escape dot
+                    .replaceAll("\\*", ".*")
+                    .replaceAll("\\{t\\}", "(?<tp>\\\\D*?\\\\d+)")
+                    .replaceAll("\\{c\\}", "(?<ch>\\\\D*?\\\\d+)")
+                    .replaceAll("\\{i\\}", "(?<il>\\\\D*?\\\\d+)")
+                    .replaceAll("\\{a\\}", "(?<ang>\\\\D*?\\\\d+)")
+                    .replaceAll("\\{x\\}", "(?<ti>\\\\D*?\\\\d+)") );
+
+            this.keys = initializeKeys(sourcePattern);
+        }
+
+        private Set<String> initializeKeys(String sourcePattern) {
+            Set<String> patternKeys = new HashSet<>();
+            String regexStr = Pattern.quote(sourcePattern);
+            Matcher m = Pattern.compile("\\{(t|c|i|a|x)\\}").matcher(regexStr);
+            while (m.find()) {
+                String key = m.group(1);
+                patternKeys.add(key);
+            }
+            return patternKeys;
+        }
+
+        String getGlobPattern() {
+            return "glob:" + globPattern;
+        }
+
+        int getSearchDepth() {
+            return (int) sourcePattern.chars().filter(c -> c == File.separatorChar).count();
+        }
+
+        boolean hasKey(String key) {
+            return keys.contains(key);
+        }
+    }
+
+    private final StackPattern fileNamePattern;
+
+    public DatasetBuilder(String fileNamePattern )
+    {
+        this.fileNamePattern = new StackPattern(fileNamePattern);
+    }
+
+    public SpimData2 createDataset(String imageDir) {
+        Path imagePath = Paths.get(imageDir);
+        List<StackFile> stackFiles = getStackFiles(imagePath);
+
+        // collect timepoints from stack files
+        Set<TimePoint> timePoints = stackFiles.stream()
+                .map(si -> new TimePoint(si.viewIndex.tp))
+                .collect(Collectors.toSet());
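+
+        // How the StackPattern placeholders resolve, as a sketch (file name made up):
+        // with --input-pattern "tile{x}_ch{c}.tif" the glob becomes "tile*_ch*.tif"
+        // and the regex "tile(?<ti>\D*?\d+)_ch(?<ch>\D*?\d+)\.tif", so a file named
+        // "tile3_ch1.tif" yields tile index 3 and channel 1, while the unused keys
+        // {t},{i},{a} default to 0 in getStackFile(). Matching runs against paths
+        // relativized to the input dir, down to the depth implied by the number of
+        // separators in the pattern (getSearchDepth()).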
+
+        // create view setups
+        Map<TileInfo, ViewSetup> viewSetups = stackFiles.stream()
+                .flatMap(sf -> sf.loadTileMetadata().stream())
+                .map(tileInfo -> {
+                    Dimensions size = new FinalDimensions(tileInfo.sizeX, tileInfo.sizeY, tileInfo.sizeZ);
+                    VoxelDimensions voxelSize = new FinalVoxelDimensions("um", tileInfo.resX, tileInfo.resY, tileInfo.resZ);
+                    return new Tuple2<>(
+                            tileInfo,
+                            new ViewSetup(
+                                    tileInfo.tileIndex,
+                                    tileInfo.chName,
+                                    size,
+                                    voxelSize,
+                                    new Tile(tileInfo.imageIndex),
+                                    new Channel(tileInfo.chIndex),
+                                    new Angle(tileInfo.angle),
+                                    new Illumination(tileInfo.illumination)
+                            )
+                    );
+                })
+                .collect(Collectors.toMap(t -> t._1, t -> t._2));
+
+        SequenceDescription sequenceDescription = new SequenceDescription(
+                new TimePoints(timePoints),
+                viewSetups.values(),
+                /*image loader*/null,
+                null // missing views not handled for now
+        );
+
+        ImgLoader imgLoader = createImageLoader(imagePath, viewSetups.keySet(), sequenceDescription);
+
+        sequenceDescription.setImgLoader(imgLoader);
+
+        // get the min resolution from all calibrations
+        double minResolution = DatasetCreationUtils.minResolution(
+                sequenceDescription,
+                sequenceDescription.getViewDescriptions().values()
+        );
+
+        ViewRegistrations viewRegistrations = DatasetCreationUtils.createViewRegistrations(
+                sequenceDescription.getViewDescriptions(),
+                minResolution
+        );
+        ViewInterestPoints viewInterestPoints = new ViewInterestPoints();
+
+        return new SpimData2(
+                imagePath.toUri(),
+                sequenceDescription,
+                viewRegistrations,
+                viewInterestPoints,
+                new BoundingBoxes(),
+                new PointSpreadFunctions(),
+                new StitchingResults(),
+                new IntensityAdjustments()
+        );
+    }
+
+    private ImgLoader createImageLoader(Path imagePath, Collection<TileInfo> tileInfos, SequenceDescription sd) {
+        Map<BasicViewDescription<?>, FileMapEntry> fileMap = new HashMap<>();
+        for (TileInfo ti : tileInfos) {
+            ViewDescription vdI = sd.getViewDescription( ti.tp, ti.tileIndex );
+            fileMap.put( vdI, new FileMapEntry(ti.filePath.toFile(), ti.imageIndex, ti.chIndex) );
+        }
+
+        return new FileMapImgLoaderLOCI(
+                fileMap,
+                sd,
+                false
+        );
+    }
+
+    private List<StackFile> getStackFiles(Path imagePath)
+    {
+        int searchDepth = fileNamePattern.getSearchDepth();
+        try {
+            // get the files
+            PathMatcher matcher = FileSystems.getDefault().getPathMatcher(fileNamePattern.getGlobPattern());
+            List<StackFile> fs = Files.walk( imagePath , searchDepth+1)
+                    .filter(path -> matcher.matches(imagePath.relativize(path)))
+                    .map(p -> getStackFile(imagePath.relativize(p).toString(), p))
+                    .collect(Collectors.toList());
+            System.out.println(fs);
+            return fs;
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+    }
+
+    private StackFile getStackFile(String matchingPattern, Path filePath)
+    {
+        Matcher m = fileNamePattern.regexPattern.matcher(matchingPattern);
+        if ( m.matches() ) {
+            int tp = extractInt(fileNamePattern.hasKey("t") ? m.group("tp") : "0");
+            int ch = extractInt(fileNamePattern.hasKey("c") ? m.group("ch") : "0");
+            int il = extractInt(fileNamePattern.hasKey("i") ? m.group("il") : "0");
+            int ang = extractInt(fileNamePattern.hasKey("a") ? m.group("ang") : "0");
+            int ti = extractInt(fileNamePattern.hasKey("x") ? m.group("ti") : "0");
+            return new StackFile(tp, ch, il, ang, ti, filePath);
+        } else {
+            throw new IllegalArgumentException(matchingPattern + " does not match " + fileNamePattern.sourcePattern + ". 
Refine the pattern and try again"); + } + } + + int extractInt(String input) { + Matcher m = Pattern.compile("\\D*(\\d+)").matcher(input); + if (m.matches()) { + return Integer.parseInt(m.group(1)); + } else { + return 0; + } + } +} From 91a7f54843511886eb31e730f1c78f71a9f4b7c0 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Mon, 15 Sep 2025 09:20:45 -0400 Subject: [PATCH 07/21] removed resave from CreateStitchingDataset --- .../spark/CreateStitchingDataset.java | 361 +----------------- ...etBuilder.java => SpimDatasetBuilder.java} | 5 +- 2 files changed, 11 insertions(+), 355 deletions(-) rename src/main/java/net/preibisch/mvrecon/dataset/{DatasetBuilder.java => SpimDatasetBuilder.java} (98%) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java index 2faa2054..04d5565d 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java @@ -1,64 +1,22 @@ package net.preibisch.bigstitcher.spark; -import java.io.File; import java.io.Serializable; import java.net.URI; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.Callable; -import java.util.stream.Collectors; -import bdv.img.n5.N5ImageLoader; import mpicbg.spim.data.SpimDataException; -import mpicbg.spim.data.sequence.ViewDescription; -import mpicbg.spim.data.sequence.ViewId; -import net.imglib2.util.Util; -import net.imglib2.util.ValuePair; import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic; -import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure; -import net.preibisch.bigstitcher.spark.util.Import; -import net.preibisch.bigstitcher.spark.util.N5Util; -import net.preibisch.bigstitcher.spark.util.RetryTrackerSpark; -import net.preibisch.bigstitcher.spark.util.Spark; -import net.preibisch.mvrecon.dataset.DatasetBuilder; -import net.preibisch.mvrecon.fiji.plugin.resave.ParametersResaveN5Api; -import net.preibisch.mvrecon.fiji.plugin.resave.Resave_HDF5; +import net.preibisch.mvrecon.dataset.SpimDatasetBuilder; import net.preibisch.mvrecon.fiji.spimdata.SpimData2; import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2; -import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader; -import net.preibisch.mvrecon.process.n5api.N5ApiTools; -import org.apache.spark.SparkConf; -import org.apache.spark.api.java.JavaRDD; -import org.apache.spark.api.java.JavaSparkContext; -import org.bigdataviewer.n5.N5CloudImageLoader; -import org.janelia.saalfeldlab.n5.Compression; -import org.janelia.saalfeldlab.n5.DataType; -import org.janelia.saalfeldlab.n5.N5Writer; -import org.janelia.saalfeldlab.n5.universe.StorageFormat; import picocli.CommandLine; import picocli.CommandLine.Option; import util.URITools; -public class CreateStitchingDataset extends AbstractInfrastructure implements Callable, Serializable { - private static final long serialVersionUID = -9140450542904228386L; - - @Option(names = {"-o", "--outputPath"}, required = true, description = "OME-ZARR/N5/HDF5 path for saving, e.g. -o /home/fused.zarr, file:/home/fused.n5 or e.g. 
s3://myBucket/data.zarr") - private String outputPathURIString = null; - - @Option(names = {"-s", "--storage"}, defaultValue = "ZARR", showDefaultValue = CommandLine.Help.Visibility.ALWAYS, - description = "Dataset storage type, currently supported OME-ZARR, N5, and ONLY for local, multithreaded Spark HDF5 (default: OME-ZARR)") - private StorageFormat storageType = null; - - @Option(names = {"--intermediate-xml"}, description = "name of the intermediate BigDataViewer xml project") - private String tmpXmlOut = null; - - @Option(names = {"-xo", "--xmlout"}, description = "path to the output xml, e.g. file:/data/dataset.xml or s3://myBucket/data/dataset.xml") - private String xmlOutURIString = null; +public class CreateStitchingDataset extends AbstractBasic implements Callable, Serializable +{ + private static final long serialVersionUID = -5155338208494730656L; @Option(names = {"--input-path"}, required = true, description = "Path to the input images, e.g. /data/images/") private String inputPath = "/Users/goinac/Work/HHMI/stitching/datasets/tiny_4_bigstitcher/t1/"; @@ -66,324 +24,23 @@ public class CreateStitchingDataset extends AbstractInfrastructure implements Ca @Option(names = {"--input-pattern"}, description = "Glob pattern for input images, e.g. /data/images/*.tif") private String inputPattern = "*"; - @Option(names = {"--output-container"}, description = "Output container") - private String outputContainer; - - @Option(names = "--blockSize", description = "blockSize, you can use smaller blocks for HDF5 (default: 128,128,64)") - private String blockSizeString = "128,128,64"; - - @Option(names = "--blockScale", description = "how many blocks to use for a single processing step, e.g. 4,4,1 means for blockSize a 128,128,32 that each spark thread writes 512,512,32 (default: 16,16,1)") - private String blockScaleString = "16,16,1"; - - @Option(names = {"-ds", "--downsampling"}, description = "downsampling pyramid (must contain full res 1,1,1 that is always created), e.g. 1,1,1; 2,2,1; 4,4,1; 8,8,2 (default: automatically computed)") - private String downsampling = null; - - @Option(names = {"-c", "--compression"}, defaultValue = "Zstandard", showDefaultValue = CommandLine.Help.Visibility.ALWAYS, - description = "Dataset compression") - private Compressions compression = null; - - @Option(names = {"-cl", "--compressionLevel"}, description = "compression level, if supported by the codec (default: gzip 1, Zstandard 3, xz 6)") - private Integer compressionLevel = null; - @Override public Void call() throws Exception { this.setRegion(); - URI outPathURI = URITools.toURI(outputPathURIString + "/"); SpimData2 spimData = createDataset(); - URI xmlOutURI = xmlOutURIString != null ? 
URITools.toURI(xmlOutURIString) : outPathURI.resolve("dataset.xml"); + URI xmlURI = URITools.toURI(xmlURIString); - System.out.println("Save spimData with original tiles to " + xmlOutURI); - new XmlIoSpimData2().save(spimData, xmlOutURI); + System.out.println("Save spimData with original tiles to " + xmlURI); + new XmlIoSpimData2().save(spimData, xmlURI); - if (outputContainer != null) { - if (tmpXmlOut != null) { - URI tmpXmlLocation = outPathURI.resolve(tmpXmlOut); - System.out.println("Save intermediate spimData to " + tmpXmlLocation); - new XmlIoSpimData2().save(spimData, tmpXmlLocation); - } - URI outputContainerURI = outPathURI.resolve(outputContainer); - System.out.println("Re-Save data to " + outputContainerURI); - saveDatasetAsN5(spimData, xmlOutURI, outputContainerURI); - } return null; } private SpimData2 createDataset() { - DatasetBuilder datasetBuilder = new DatasetBuilder(inputPattern); - return datasetBuilder.createDataset(inputPath); - } - - private void saveDatasetAsN5(SpimData2 spimData, URI outputXMLURI, URI outputContainerURI) { - List viewIds = Import.getViewIds(spimData); - if (viewIds.isEmpty()) { - throw new IllegalArgumentException("No views could be generated."); - } - - Collections.sort(viewIds); - - final Compression dataCompression = N5Util.getCompression(this.compression, this.compressionLevel); - - final int[] blockSize = Import.csvStringToIntArray(blockSizeString); - final int[] blockScale = Import.csvStringToIntArray(blockScaleString); - - final int[] computeBlockSize = new int[]{ - blockSize[0] * blockScale[0], - blockSize[1] * blockScale[1], - blockSize[2] * blockScale[2]}; - - final N5Writer n5Writer = URITools.instantiateN5Writer( - storageType == StorageFormat.ZARR ? StorageFormat.ZARR : StorageFormat.N5, - outputContainerURI - ); - - System.out.println("Compression: " + this.compression); - System.out.println("Compression level: " + (compressionLevel == null ? 
"default" : compressionLevel)); - System.out.println("N5 block size=" + Util.printCoordinates(blockSize)); - System.out.println("Compute block size=" + Util.printCoordinates(computeBlockSize)); - System.out.println("Setting up XML at: " + outputXMLURI); - System.out.println("Setting up N5 writing to basepath: " + outputContainerURI); - - // all ViewSetupIds (needed to create N5 datasets) - final HashMap dimensions = - N5ApiTools.assembleDimensions(spimData, viewIds); - - // all grids across all ViewId's - final List gridS0 = - viewIds.stream().map(viewId -> - N5ApiTools.assembleJobs( - viewId, - dimensions.get(viewId.getViewSetupId()), - blockSize, - computeBlockSize)).flatMap(List::stream).collect(Collectors.toList()); - - final Map dataTypes = - N5ApiTools.assembleDataTypes(spimData, dimensions.keySet()); - - // estimate or read downsampling factors - final int[][] downsamplings; - - if (this.downsampling == null) - downsamplings = N5ApiTools.mipMapInfoToDownsamplings( - Resave_HDF5.proposeMipmaps(N5ApiTools.assembleViewSetups(spimData, viewIds)) - ); - else - downsamplings = Import.csvStringToDownsampling(this.downsampling); - - if (!Import.testFirstDownsamplingIsPresent(downsamplings)) - throw new IllegalStateException("First downsampling step must be full resolution [1,1,...1], stopping."); - - System.out.println("Downsamplings: " + Arrays.deepToString(downsamplings)); - - // create all datasets and write BDV metadata for all ViewIds (including downsampling) in parallel - long time = System.currentTimeMillis(); - - final Map viewIdToMrInfo = - viewIds.parallelStream().map(viewId -> { - final N5ApiTools.MultiResolutionLevelInfo[] mrInfo; - - if (storageType == StorageFormat.ZARR) { - mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR( - n5Writer, - viewId, - dataTypes.get(viewId.getViewSetupId()), - dimensions.get(viewId.getViewSetupId()), - dataCompression, - blockSize, - downsamplings); - } else { - System.out.println(Arrays.toString(blockSize)); - mrInfo = N5ApiTools.setupBdvDatasetsN5( - n5Writer, - viewId, - dataTypes.get(viewId.getViewSetupId()), - dimensions.get(viewId.getViewSetupId()), - dataCompression, - blockSize, - downsamplings); - } - - return new ValuePair<>( - new ViewId( viewId.getTimePointId(), viewId.getViewSetupId() ), // viewId is actually a ViewDescripton object, thus not serializable - mrInfo ); - }).collect(Collectors.toMap(e -> e.getA(), e -> e.getB())); - - System.out.println( "Created BDV-metadata, took " + (System.currentTimeMillis() - time ) + " ms." ); - System.out.println( "Number of compute blocks = " + gridS0.size() ); - - final SparkConf conf = new SparkConf().setAppName("SparkSaveDataset"); - - if ( localSparkBindAddress ) - conf.set("spark.driver.bindAddress", "127.0.0.1"); - - final JavaSparkContext sc = new JavaSparkContext(conf); - sc.setLogLevel("ERROR"); - - // - // Save s0 level - // - time = System.currentTimeMillis(); - final RetryTrackerSpark retryTracker = - RetryTrackerSpark.forGridBlocks("s0 n5-api dataset resaving", gridS0.size()); - - do { - if (!retryTracker.beginAttempt()) { - System.out.println( "Stopping." 
); - System.exit( 1 ); - } - - final JavaRDD rdds0 = sc.parallelize( gridS0, Math.min( Spark.maxPartitions, gridS0.size() ) ); - - final JavaRDD rdds0Result = rdds0.map( gridBlock -> { - final SpimData2 dataLocal = Spark.getSparkJobSpimData2(outputXMLURI); - final N5Writer n5Lcl = URITools.instantiateN5Writer( storageType, outputContainerURI ); - - N5ApiTools.resaveS0Block( - dataLocal, - n5Lcl, - storageType, - dataTypes.get( N5ApiTools.gridBlockToViewId( gridBlock ).getViewSetupId() ), - N5ApiTools.gridToDatasetBdv( 0, storageType ), // a function mapping the gridblock to the dataset name for level 0 and N5 - gridBlock ); - - n5Lcl.close(); - - return gridBlock.clone(); - }); - - rdds0Result.cache(); - rdds0Result.count(); - - // extract all blocks that failed - final Set failedBlocksSet = - retryTracker.processWithSpark( rdds0Result, gridS0 ); - - // Use RetryTracker to handle retry counting and removal - if (!retryTracker.processFailures(failedBlocksSet)) { - System.out.println( "Stopping." ); - System.exit( 1 ); - } - - // Update grid for next iteration with remaining failed blocks - gridS0.clear(); - gridS0.addAll(failedBlocksSet); - } - while ( gridS0.size() > 0 ); - - System.out.println( "Saved " + (storageType == StorageFormat.ZARR ? "OME-ZARR 0" : "N5 s0") + "-level, took: " + (System.currentTimeMillis() - time ) + " ms." ); - - // - // Save remaining downsampling levels (s1 ... sN) - // - for ( int level = 1; level < downsamplings.length; ++level ) { - final int s = level; - - //mrInfo.dimensions, mrInfo.blockSize, mrInfo.blockSize - final List allBlocks = - viewIds.stream().map( viewId -> - N5ApiTools.assembleJobs( - viewId, - viewIdToMrInfo.get(viewId)[s] )).flatMap(List::stream).collect( Collectors.toList() ); - - System.out.println( "Downsampling level " + (storageType == StorageFormat.N5 ? "s" : "") + s + "... " ); - System.out.println( "Number of compute blocks: " + allBlocks.size() ); - - final RetryTrackerSpark retryTrackerDS = - RetryTrackerSpark.forGridBlocks( "s" + s +" n5-api dataset resaving", allBlocks.size()); - - final long timeS = System.currentTimeMillis(); - - do - { - if (!retryTrackerDS.beginAttempt()) - { - System.out.println( "Stopping." 
); - System.exit( 1 ); - } - - final JavaRDD rddsN = sc.parallelize(allBlocks, Math.min( Spark.maxPartitions, allBlocks.size() ) ); - - final JavaRDD rdds0Result = rddsN.map( gridBlock -> - { - final N5Writer n5Lcl = URITools.instantiateN5Writer( storageType, outputContainerURI ); - - if ( storageType == StorageFormat.ZARR ) { - N5ApiTools.writeDownsampledBlock5dOMEZARR( - n5Lcl, - viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ), - viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ), - gridBlock, - 0, - 0 ); - - } else { - N5ApiTools.writeDownsampledBlock( - n5Lcl, - viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ), - viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ), - gridBlock ); - } - - n5Lcl.close(); - - return gridBlock.clone(); - }); - - rdds0Result.cache(); - rdds0Result.count(); - - // extract all blocks that failed - final Set failedBlocksSet = - retryTrackerDS.processWithSpark( rdds0Result, allBlocks ); - - // Use RetryTracker to handle retry counting and removal - if (!retryTrackerDS.processFailures(failedBlocksSet)) - { - System.out.println( "Stopping." ); - System.exit( 1 ); - } - - // Update grid for next iteration with remaining failed blocks - allBlocks.clear(); - allBlocks.addAll(failedBlocksSet); - - } while ( allBlocks.size() > 0 ); - - System.out.println( "Saved as " + (storageType == StorageFormat.N5 ? "N5 s" : "OME-ZARR ") + s + " level, took: " + (System.currentTimeMillis() - timeS ) + " ms." ); - } - - sc.close(); - - System.out.println( "Saved the entire dataset successfully." ); - - System.out.println( "Saving new xml to: " + outputXMLURI ); - - if (storageType == StorageFormat.N5) { - if (URITools.isFile(outputContainerURI)) { - spimData.getSequenceDescription().setImgLoader( - new N5ImageLoader( outputContainerURI, spimData.getSequenceDescription())); - } else { - spimData.getSequenceDescription().setImgLoader( - new N5CloudImageLoader( null, outputContainerURI, spimData.getSequenceDescription())); // null is OK because the instance is not used now - } - } else { - final Map< ViewId, AllenOMEZarrLoader.OMEZARREntry> viewIdToPath = new HashMap<>(); - - viewIdToMrInfo.forEach( (viewId, mrInfo ) -> - viewIdToPath.put( - viewId, - new AllenOMEZarrLoader.OMEZARREntry( mrInfo[ 0 ].dataset.substring(0, mrInfo[ 0 ].dataset.lastIndexOf( "/" ) ), new int[] { 0, 0 } ) ) - ); - spimData.getSequenceDescription().setImgLoader( - new AllenOMEZarrLoader( outputContainerURI, spimData.getSequenceDescription(), viewIdToPath )); // null is OK because the instance is not used now - } - new XmlIoSpimData2().save( spimData, outputXMLURI ); - - n5Writer.close(); - - System.out.println( "Resaved project, in total took: " + (System.currentTimeMillis() - time ) + " ms." ); - System.out.println( "done." ); + SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern); + return spimDatasetBuilder.createDataset(inputPath); } public static void main(final String... 
args) throws SpimDataException {

diff --git a/src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
similarity index 98%
rename from src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java
rename to src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
index 15306277..74ae558b 100644
--- a/src/main/java/net/preibisch/mvrecon/dataset/DatasetBuilder.java
+++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
@@ -45,7 +45,6 @@
 import net.preibisch.mvrecon.fiji.spimdata.imgloaders.FileMapImgLoaderLOCI;
 import net.preibisch.mvrecon.fiji.spimdata.imgloaders.LegacyStackImgLoaderLOCI;
 import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapEntry;
-import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapImgLoaderLOCI2;
 import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments;
 import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints;
 import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions;
@@ -55,7 +54,7 @@
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import scala.Tuple2;
 
-public class DatasetBuilder {
+public class SpimDatasetBuilder {
 
     static class ViewIndex {
         final int tp, ch, il, ang;
@@ -309,7 +308,7 @@ boolean hasKey(String key) {
 
     private final StackPattern fileNamePattern;
 
-    public DatasetBuilder(String fileNamePattern )
+    public SpimDatasetBuilder(String fileNamePattern )
     {
         this.fileNamePattern = new StackPattern(fileNamePattern);
     }

From cdaa430f47702204914c623016a04aa6e7ca3479 Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Mon, 15 Sep 2025 09:26:13 -0400
Subject: [PATCH 08/21] renamed CreateStitchingDataset to CreateDataset

---
 install                                                       | 2 +-
 .../spark/{CreateStitchingDataset.java => CreateDataset.java} | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
 rename src/main/java/net/preibisch/bigstitcher/spark/{CreateStitchingDataset.java => CreateDataset.java} (89%)

diff --git a/install b/install
index 1052b165..45ea97eb 100755
--- a/install
+++ b/install
@@ -127,7 +127,7 @@ install_command solver "net.preibisch.bigstitcher.spark.Solver"
 install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
 install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
 install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
-install_command create-stitching-dataset "net.preibisch.bigstitcher.spark.CreateStitchingDataset"
+install_command create-dataset "net.preibisch.bigstitcher.spark.CreateDataset"
 
 echo 'Installing utils ...'
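As a usage illustration (not part of the patch): after this rename the installed launcher is invoked as create-dataset. Assuming the project-XML option inherited from AbstractBasic is -x/--xml (that option is not shown in this diff), a minimal run might look like

    # sketch only; the XML and image paths are placeholders
    create-dataset -x /data/dataset.xml --input-path /data/images/ --input-pattern '*.tif'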
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
similarity index 89%
rename from src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java
rename to src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
index 04d5565d..78629537 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateStitchingDataset.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -14,7 +14,7 @@
 import picocli.CommandLine.Option;
 import util.URITools;
 
-public class CreateStitchingDataset extends AbstractBasic implements Callable<Void>, Serializable
+public class CreateDataset extends AbstractBasic implements Callable<Void>, Serializable
 {
     private static final long serialVersionUID = -5155338208494730656L;
 
@@ -46,6 +46,6 @@ private SpimData2 createDataset() {
     public static void main(final String... args) throws SpimDataException {
         System.out.println(Arrays.toString(args));
-        System.exit(new CommandLine(new CreateStitchingDataset()).execute(args));
+        System.exit(new CommandLine(new CreateDataset()).execute(args));
     }
 }

From 1559eda60f0229d9e3e5830989b6960b208e13cb Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Mon, 15 Sep 2025 21:23:35 -0400
Subject: [PATCH 09/21] chain command

---
 install                                            |   1 +
 .../bigstitcher/spark/ChainCommands.java           | 129 ++++++++++++++++++
 2 files changed, 130 insertions(+)
 create mode 100644 src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java

diff --git a/install b/install
index 45ea97eb..d90f12ed 100755
--- a/install
+++ b/install
@@ -128,6 +128,7 @@ install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
 install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
 install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
 install_command create-dataset "net.preibisch.bigstitcher.spark.CreateDataset"
+install_command chain-commands "net.preibisch.bigstitcher.spark.ChainCommands"
 
 echo 'Installing utils ...'
 
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
new file mode 100644
index 00000000..62e21c97
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
@@ -0,0 +1,129 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Stack;
+import java.util.concurrent.Callable;
+
+import mpicbg.spim.data.SpimDataException;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure;
+import picocli.CommandLine;
+
+public class ChainCommands extends AbstractInfrastructure implements Callable<Void>, Serializable
+{
+    private static final long serialVersionUID = 1584686229152127469L;
+
+    static class CommandWithArguments {
+        final List<String> cmdArgs;
+
+        CommandWithArguments(List<String> cmdArgs) {
+            this.cmdArgs = cmdArgs;
+        }
+    }
+
+    /**
+     * Custom converter to collect all arguments after --command until a separator
+     * (either ';' or '+') or the end of the input.
+     */
+    static class CommandArgsConverter implements CommandLine.IParameterConsumer {
+        @Override
+        public void consumeParameters(Stack<String> args, CommandLine.Model.ArgSpec argSpec, CommandLine.Model.CommandSpec commandSpec) {
+            List<CommandWithArguments> currentCommands = argSpec.getValue();
+            List<String> commandArgs = new ArrayList<>();
+            while (!args.isEmpty()) {
+                String arg = args.pop();
+
+                if (";".equals(arg) || "+".equals(arg)) {
+                    break;
+                }
+                if (arg.equals("-h") || arg.equals("--help")) {
+                    // add back the help flag at the bottom of the stack,
+                    // but first check whether anything was left; if nothing was, stop after this
+                    boolean done = args.isEmpty();
+                    args.add(0, arg);
+                    if (done) break;
+                } else
+                    commandArgs.add(arg);
+            }
+            currentCommands.add(new CommandWithArguments(commandArgs));
+        }
+    }
+
+    @CommandLine.Option(names = { "-h", "--help" }, description = "display this help message", usageHelp = true)
+    boolean helpFlag;
+
+    @CommandLine.Option(names = { "--command" }, parameterConsumer = CommandArgsConverter.class,
+            description = "Command to execute with its arguments. Multiple commands can be chained using ';' or '+'.\n" +
+                    "Example: --command create-dataset --input-path /data/images/ --input-pattern '*.tif' ; " +
+                    "--command detect-interestpoints --detector SIFT --descriptor SIFT ; " +
+                    "--command match-interestpoints --matcher FLANN ; " +
+                    "--command stitching --stitchingModel Affine")
+    List<CommandWithArguments> commands = new ArrayList<>();
+
+    @Override
+    public Void call() throws Exception {
+        for (CommandWithArguments commandArgs : commands) {
+            if (commandArgs.cmdArgs.isEmpty())
+                continue;
+
+            String cmdName = commandArgs.cmdArgs.get(0);
+            List<String> cmdArgs = new ArrayList<>(commandArgs.cmdArgs.subList(1, commandArgs.cmdArgs.size()));
+            addCommonOptions(cmdArgs);
+
+            AbstractInfrastructure cmdInstance = getCmdInstance(cmdName);
+            CommandLine currentCmdLine = new CommandLine(cmdInstance);
+            System.out.println("Execute command: " + cmdName + " with args: " + cmdArgs);
+            int exitCode = currentCmdLine.execute(cmdArgs.toArray(new String[0]));
+            if (exitCode != 0) {
+                System.err.println("Command " + cmdName + " failed with exit code " + exitCode);
+                System.exit(exitCode);
+            }
+        }
+        return null;
+    }
+
+    private AbstractInfrastructure getCmdInstance(String name) {
+        switch (name) {
+            case "clear-interestpoints": return new ClearInterestPoints();
+            case "clear-registrations": return new ClearRegistrations();
+            case "create-container": return new CreateFusionContainer();
+            case "detect-interestpoints": return new SparkInterestPointDetection();
+            case "match-interestpoints": return new SparkGeometricDescriptorMatching();
+            case "nonrigid-fusion": return new SparkNonRigidFusion();
+            case "create-dataset": return new CreateDataset();
+            case "stitching": return new SparkPairwiseStitching();
+            case "resave": return new SparkResaveN5();
+            case "downsample": return new SparkDownsample();
+            case "affine-fusion": return new SparkAffineFusion();
+            case "solver": return new Solver();
+            default: throw new IllegalArgumentException("Unknown command: " + name);
+        }
+    }
+
+    private void addCommonOptions(List<String> cmdArgs) {
+        if (this.dryRun) {
+            cmdArgs.add("--dryRun");
+        }
+        if (this.localSparkBindAddress) {
+            cmdArgs.add("--localSparkBindAddress");
+        }
+        if (this.s3Region != null && !this.s3Region.isEmpty()) {
+            cmdArgs.add("--s3Region");
+            cmdArgs.add(this.s3Region);
+        }
+    }
+
+    public static void main(final String... args) throws SpimDataException {
+        System.out.println(Arrays.toString(args));
+
+        ChainCommands chainedCommands = new ChainCommands();
+        CommandLine commandLine = new CommandLine(chainedCommands)
+                .setUnmatchedOptionsArePositionalParams(true);
+
+        System.exit(commandLine.execute(args));
+    }
+
+}
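For illustration (not part of the patch): with the launcher installed as chain-commands, a chained run could look like

    # sketch only; the image path and pattern are placeholders,
    # --stitchingModel Affine is taken from the --command description above
    chain-commands --localSparkBindAddress \
        --command create-dataset --input-path /data/images/ --input-pattern '*.tif' ';' \
        --command stitching --stitchingModel Affine

Note that the ';' separator must be quoted or escaped (or '+' used instead) so the shell does not consume it, and that addCommonOptions forwards --dryRun, --localSparkBindAddress, and --s3Region to every chained command.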
From 4dbff7ce5e39a85ef27879313a3125e654dd666f Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Tue, 16 Sep 2025 09:31:20 -0400
Subject: [PATCH 10/21] create the parent directory if missing

---
 .../preibisch/bigstitcher/spark/CreateDataset.java | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
index 78629537..1a54185c 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -2,6 +2,8 @@
 
 import java.io.Serializable;
 import java.net.URI;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.concurrent.Callable;
 
@@ -33,6 +35,17 @@ public Void call() throws Exception {
     URI xmlURI = URITools.toURI(xmlURIString);
 
     System.out.println("Save spimData with original tiles to " + xmlURI);
+    if (URITools.isFile( xmlURI )) {
+        Path xmlPath = Paths.get(xmlURI);
+        // create parent directories if necessary
+        if ( !xmlPath.getParent().toFile().exists() ) {
+            if (!xmlPath.getParent().toFile().mkdirs()) {
+                // log the error but continue;
+                // if the directory wasn't created, saving will fail later when trying to write the file
+                System.out.println("Failed to create parent directory for " + xmlURI);
+            }
+        }
+    }
     new XmlIoSpimData2().save(spimData, xmlURI);
 
     return null;

From 3662ce292e2618c712c8f89baa167a2726ca01d1 Mon Sep 17 00:00:00 2001
From: Cristian Goina
Date: Thu, 18 Sep 2025 19:03:19 -0400
Subject: [PATCH 11/21] added a mechanism to create a project for an N5
 container that has the Saalfeld stitching hierarchy, i.e., c/s

---
 .../bigstitcher/spark/CreateDataset.java      |   2 +-
 .../bigstitcher/spark/SparkAffineFusion.java  |  17 +-
 .../mvrecon/dataset/N5MultichannelLoader.java |  62 +++
 .../dataset/N5MultichannelProperties.java     | 109 +++++
 .../mvrecon/dataset/SpimDatasetBuilder.java   | 441 ++++++++++--------
 .../dataset/XmlToN5MultichannelLoader.java    |  63 +++
 6 files changed, 507 insertions(+), 187 deletions(-)
 create mode 100644 src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
 create mode 100644 src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java
 create mode 100644 src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java

diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
index 1a54185c..79dd0d77 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -53,7 +53,7 @@ public Void call() throws Exception {
 
     private SpimData2 createDataset() {
         SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern);
-        return spimDatasetBuilder.createDataset(inputPath);
+        return spimDatasetBuilder.createDataset(URITools.toURI(inputPath));
     }
 
     public static void main(final String... args) throws SpimDataException {
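To orient the reader (an illustration, not part of the patch): the "Saalfeld stitching hierarchy" named in the subject is, as the N5ViewSetupBuilder introduced later in this patch appears to assume, a flat N5 container with one group per channel and scale-level datasets nested inside it, roughly

    tiles.n5/          # hypothetical container
        c0/s0          # channel 0, full resolution
        c0/s1          # channel 0, first downsampled level
        c1/s0
        ...

together with a root pixelResolution attribute (a map with "dimensions" and "unit" keys) that the builder turns into VoxelDimensions, falling back to unit voxels when the attribute is absent.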
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
index faf50500..faf3b967 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
@@ -291,6 +291,10 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
 
     final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class );
 
+    final long[] orig_bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class );
+    if ( !Double.isNaN( anisotropyFactor ) )
+        orig_bbMin[ 2 ] = Math.round( Math.floor( orig_bbMin[ 2 ] * anisotropyFactor ) );
+
     System.out.println( "FusionFormat: " + fusionFormat );
     System.out.println( "Input XML: " + xmlURI );
     System.out.println( "BDV project: " + bdv );
@@ -478,8 +482,6 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN
 
     System.out.println( "numJobs = " + grid.size() );
 
-    //driverVolumeWriter.setAttribute( n5Dataset, "offset", minBB );
-
     final RetryTrackerSpark retryTracker =
         RetryTrackerSpark.forGridBlocks("s0 block processing", grid.size());
 
@@ -499,6 +501,13 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN
     {
         final SpimData2 dataLocal = Spark.getSparkJobSpimData2(xmlURI);
 
+        final HashMap< ViewId, AffineTransform3D > orig_registrations =
+            TransformVirtual.adjustAllTransforms(
+                viewIds,
+                dataLocal.getViewRegistrations().getViewRegistrations(),
+                Double.NaN,
+                Double.NaN );
+
         final HashMap< ViewId, AffineTransform3D > registrations =
             TransformVirtual.adjustAllTransforms(
                 viewIds,
@@ -535,7 +544,7 @@ else if ( dataType == DataType.UINT16 )
     // The min coordinates of the block that this job renders (in pixels)
     final int n = gridBlock[ 0 ].length;
     final long[] superBlockOffset = new long[ n ];
-    Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + bbMin[ d ] );
+    Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + orig_bbMin[ d ] );
 
     // The size of the block that this job renders (in pixels)
     final long[] superBlockSize = gridBlock[ 1 ];
@@ -550,7 +559,7 @@ else if ( dataType == DataType.UINT16 )
     Arrays.setAll( fusedBlockMax, d -> superBlockOffset[ d ] + superBlockSize[ d ] - 1 );
 
     final List< ViewId > overlappingViews =
-        OverlappingViews.findOverlappingViews( dataLocal, viewIds, registrations, fusedBlock );
+        OverlappingViews.findOverlappingViews( dataLocal, viewIds, orig_registrations, fusedBlock );
 
     if ( overlappingViews.size() == 0 )
         return gridBlock;

diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
new file mode 100644
index 00000000..2b07631a
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
@@ -0,0 +1,62 @@
+/*-
+ * #%L
+ * Software for the reconstruction of multi-view microscopic acquisitions
+ * like Selective Plane Illumination Microscopy (SPIM) Data.
+ * %%
+ * Copyright (C) 2012 - 2025 Multiview Reconstruction developers.
+ * %%
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation, either version 2 of the
+ * License, or (at your option) any later version.
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public + * License along with this program. If not, see + * . + * #L% + */ +package net.preibisch.mvrecon.dataset; + +import java.net.URI; +import java.util.Map; + +import bdv.img.n5.N5ImageLoader; +import bdv.img.n5.N5Properties; +import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; +import mpicbg.spim.data.sequence.ViewId; +import org.janelia.saalfeldlab.n5.universe.StorageFormat; +import util.URITools; + +public class N5MultichannelLoader extends N5ImageLoader +{ + private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription; + + private final Map< ViewId, String > viewIdToPath; + + public N5MultichannelLoader( + final URI n5URI, + final StorageFormat storageFormat, + final AbstractSequenceDescription< ?, ?, ? > sequenceDescription, + final Map< ViewId, String > viewIdToPath ) + { + super( URITools.instantiateN5Reader( storageFormat, n5URI ), n5URI, sequenceDescription ); + this.sequenceDescription = sequenceDescription; + + this.viewIdToPath = viewIdToPath; + } + + @Override + protected N5Properties createN5PropertiesInstance() + { + return new N5MultichannelProperties( sequenceDescription, viewIdToPath ); + } + + public Map getViewIdToPath() { + return viewIdToPath; + } +} diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java new file mode 100644 index 00000000..a39f1efb --- /dev/null +++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java @@ -0,0 +1,109 @@ +/*- + * #%L + * Software for the reconstruction of multi-view microscopic acquisitions + * like Selective Plane Illumination Microscopy (SPIM) Data. + * %% + * Copyright (C) 2012 - 2025 Multiview Reconstruction developers. + * %% + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as + * published by the Free Software Foundation, either version 2 of the + * License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public + * License along with this program. If not, see + * . + * #L% + */ +package net.preibisch.mvrecon.dataset; + +import java.util.Arrays; +import java.util.Map; + +import bdv.img.n5.N5Properties; +import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; +import mpicbg.spim.data.sequence.TimePoint; +import mpicbg.spim.data.sequence.ViewId; +import org.janelia.saalfeldlab.n5.DataType; +import org.janelia.saalfeldlab.n5.N5Reader; + +public class N5MultichannelProperties implements N5Properties +{ + private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription; + + private final Map< ViewId, String > viewIdToPath; + + public N5MultichannelProperties( + final AbstractSequenceDescription< ?, ?, ? 
> sequenceDescription, + final Map< ViewId, String > viewIdToPath ) + { + this.sequenceDescription = sequenceDescription; + this.viewIdToPath = viewIdToPath; + } + + private String getPath( final int setupId, final int timepointId ) + { + return viewIdToPath.get( new ViewId( timepointId, setupId ) ); + } + + @Override + public String getDatasetPath( final int setupId, final int timepointId, final int level ) + { + return String.format( getPath( setupId, timepointId )+ "/s%d", level ); + } + + @Override + public DataType getDataType( final N5Reader n5, final int setupId ) + { + return getDataType( this, n5, setupId ); + } + + @Override + public double[][] getMipmapResolutions( final N5Reader n5, final int setupId ) + { + return getMipMapResolutions( this, n5, setupId ); + } + + @Override + public long[] getDimensions( final N5Reader n5, final int setupId, final int timepointId, final int level ) + { + final String path = getDatasetPath( setupId, timepointId, level ); + final long[] dimensions = n5.getDatasetAttributes( path ).getDimensions(); + return Arrays.copyOf( dimensions, 3 ); + } + + // + // static methods + // + + private static int getFirstAvailableTimepointId( final AbstractSequenceDescription< ?, ?, ? > seq, final int setupId ) + { + for ( final TimePoint tp : seq.getTimePoints().getTimePointsOrdered() ) + { + if ( seq.getMissingViews() == null || seq.getMissingViews().getMissingViews() == null || !seq.getMissingViews().getMissingViews().contains( new ViewId( tp.getId(), setupId ) ) ) + return tp.getId(); + } + + throw new RuntimeException( "All timepoints for setupId " + setupId + " are declared missing. Stopping." ); + } + + private static DataType getDataType(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId ) + { + final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId ); + return n5.getDatasetAttributes( n5properties.getDatasetPath( setupId, timePointId, 0 ) ).getDataType(); + } + + private static double[][] getMipMapResolutions(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId ) + { + final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId ); + + // read scales and pixelResolution attributes from the base container and build the mipmap resolutions from that + + return new double[][] { { 1, 1, 1 } }; // default + } +} diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java index 74ae558b..b30ed9ab 100644 --- a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java +++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java @@ -3,13 +3,14 @@ import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; +import java.net.URI; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collection; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -29,12 +30,12 @@ import mpicbg.spim.data.sequence.Channel; import mpicbg.spim.data.sequence.FinalVoxelDimensions; import mpicbg.spim.data.sequence.Illumination; -import mpicbg.spim.data.sequence.ImgLoader; import mpicbg.spim.data.sequence.SequenceDescription; import mpicbg.spim.data.sequence.Tile; import 
mpicbg.spim.data.sequence.TimePoint; import mpicbg.spim.data.sequence.TimePoints; import mpicbg.spim.data.sequence.ViewDescription; +import mpicbg.spim.data.sequence.ViewId; import mpicbg.spim.data.sequence.ViewSetup; import mpicbg.spim.data.sequence.VoxelDimensions; import net.imglib2.Dimensions; @@ -52,7 +53,10 @@ import ome.units.UNITS; import ome.units.quantity.Length; import org.apache.commons.lang3.builder.ToStringBuilder; -import scala.Tuple2; +import org.janelia.saalfeldlab.n5.DatasetAttributes; +import org.janelia.saalfeldlab.n5.N5FSReader; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.universe.StorageFormat; public class SpimDatasetBuilder { @@ -89,73 +93,11 @@ public String toString() { } } - static class TileInfo { - final int tileIndex; // global tile (view) index - final Path filePath; // file containing this tile - final int tp; - final int tileName; - final int imageIndex; - final int chIndex; - final String chName; - final int angle; - final int illumination; - final int sizeZ; - final int sizeY; - final int sizeX; - final double z; - final double y; - final double x; - final double resZ; - final double resY; - final double resX; - - TileInfo(int tileIndex, - Path filePath, - int tp, - int tileName, - int imageIndex, - int chIndex, String chName, - int angle, int illumination, - int sizeZ, int sizeY, int sizeX, - double z, double y, double x, - double resZ, double resY, double resX) { - this.tileIndex = tileIndex; - this.filePath = filePath; - this.tp = tp; - this.tileName = tileName; - this.imageIndex = imageIndex; - this.chIndex = chIndex; - this.chName = chName; - this.angle = angle; - this.illumination = illumination; - this.sizeZ = sizeZ; - this.sizeY = sizeY; - this.sizeX = sizeX; - this.z = z; - this.y = y; - this.x = x; - this.resZ = resZ; - this.resY = resY; - this.resX = resX; - } - - @Override - public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) return false; - TileInfo tileInfo = (TileInfo) o; - return tp == tileInfo.tp && imageIndex == tileInfo.imageIndex && tileIndex == tileInfo.tileIndex && chIndex == tileInfo.chIndex && angle == tileInfo.angle && illumination == tileInfo.illumination && Objects.equals(filePath, tileInfo.filePath) && Objects.equals(chName, tileInfo.chName); - } - - @Override - public int hashCode() { - return Objects.hash(filePath, tp, imageIndex, tileIndex, chIndex, chName, angle, illumination); - } - } - static class StackFile { final ViewIndex viewIndex; final int ti; - final Path file; + final URI baseURI; + final String relativeFilePath; int nImages = -1; int nTp = -1; int nCh = -1; @@ -163,23 +105,27 @@ static class StackFile { int sizeY = -1; int sizeX = -1; - StackFile(int tp, int ch, int il, int ang, int ti, Path file ) + StackFile(int tp, int ch, int il, int ang, int ti, URI baseURI, String relativeFilePath ) { this.viewIndex = new ViewIndex(tp, ch, il, ang); this.ti = ti; - this.file = file; + this.baseURI = baseURI; + this.relativeFilePath = relativeFilePath; } @Override public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; StackFile stackFile = (StackFile) o; - return ti == stackFile.ti && Objects.equals(viewIndex, stackFile.viewIndex) && Objects.equals(file, stackFile.file); + return ti == stackFile.ti && + Objects.equals(viewIndex, stackFile.viewIndex) && + Objects.equals(baseURI, stackFile.baseURI) && + Objects.equals(relativeFilePath, stackFile.relativeFilePath); } @Override public int hashCode() { - return 
Objects.hash(viewIndex, ti, file); + return Objects.hash(viewIndex, ti, baseURI, relativeFilePath); } @Override @@ -187,73 +133,232 @@ public String toString() { return new ToStringBuilder(this) .append("view", viewIndex) .append("ti", ti) - .append("file", file) + .append("baseURI", baseURI) + .append("relativeFilePath", relativeFilePath) .toString(); } - List loadTileMetadata() - { - List tiles = new ArrayList<>(); - if ( !file.toFile().exists() ) - { - return tiles; + Path getFilePath() { + return Paths.get(baseURI).resolve(relativeFilePath); + } + + int getTp() { + return viewIndex.tp; + } + + int getTi() { + return ti; + } + + public int getCh() { + return viewIndex.ch; + } + + public int getAng() { + return viewIndex.ang; + } + + public int getIl() { + return viewIndex.il; + } + } + + interface ViewSetupBuilder { + SequenceDescription getSequenceDescription(); + ViewSetupBuilder setImgLoader(); + ViewSetupBuilder createViewSetups(List stackFiles); + } + + + static class LOCIViewSetupBuilder implements ViewSetupBuilder { + + private final SequenceDescription sequenceDescription; + private final Map viewToStackFileMap = new HashMap<>(); + + LOCIViewSetupBuilder(TimePoints timePoints) { + this.sequenceDescription = new SequenceDescription( + timePoints, + /*view setups*/Collections.emptyList() + ); + } + + @Override + public SequenceDescription getSequenceDescription() { + return sequenceDescription; + } + + @Override + public LOCIViewSetupBuilder setImgLoader() { + Map, FileMapEntry> fileMap = new HashMap<>(); + for (ViewSetup vs : sequenceDescription.getViewSetupsOrdered()) { + StackFile stackFile = viewToStackFileMap.get(vs.getId()); + ViewDescription vdI = sequenceDescription.getViewDescription( stackFile.getTp(), vs.getId() ); + fileMap.put( vdI, new FileMapEntry(stackFile.getFilePath().toFile(), vs.getTile().getId(), vs.getChannel().getId()) ); } - IFormatReader formatReader = new ChannelSeparator(); - try { - if ( !LegacyStackImgLoaderLOCI.createOMEXMLMetadata( formatReader ) ) { - try { - formatReader.close(); - } catch (IOException e) { - e.printStackTrace(); + sequenceDescription.setImgLoader(new FileMapImgLoaderLOCI( + fileMap, + sequenceDescription, + false + )); + return this; + } + + @SuppressWarnings("unchecked") + @Override + public LOCIViewSetupBuilder createViewSetups(List stackFiles) { + stackFiles.forEach( stackFile -> { + File tileFile = stackFile.getFilePath().toFile(); + if ( !tileFile.exists() ) + { + return; + } + IFormatReader formatReader = new ChannelSeparator(); + try { + if ( !LegacyStackImgLoaderLOCI.createOMEXMLMetadata( formatReader ) ) { + try { + formatReader.close(); + } catch (IOException e) { + e.printStackTrace(); + } + return; + } + + formatReader.setId( tileFile.toString() ); + + MetadataRetrieve retrieve = (MetadataRetrieve)formatReader.getMetadataStore(); + + stackFile.nImages = retrieve.getImageCount(); + stackFile.nTp = formatReader.getSizeT(); + stackFile.nCh = formatReader.getSizeC(); + stackFile.sizeZ = formatReader.getSizeZ(); + stackFile.sizeY = formatReader.getSizeY(); + stackFile.sizeX = formatReader.getSizeX(); + for (int imageIndex = 0; imageIndex < stackFile.nImages; imageIndex++) { + Length resX = retrieve.getPixelsPhysicalSizeX(imageIndex); + Length resY = retrieve.getPixelsPhysicalSizeY(imageIndex); + Length resZ = retrieve.getPixelsPhysicalSizeZ(imageIndex); + double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0; + double rY = resY != null ? 
resY.value(UNITS.MICROMETER).doubleValue() : 0; + double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 0; + int imageChannels = retrieve.getChannelCount(imageIndex); + for (int chIndex = 0; chIndex < imageChannels; chIndex++) { + String chName = retrieve.getChannelName(imageIndex, chIndex); + int viewIndex = (stackFile.getTi() * stackFile.nImages + imageIndex) * imageChannels + chIndex; + ViewSetup vs = new ViewSetup( + viewIndex, + chName, + new FinalDimensions(stackFile.sizeX, stackFile.sizeY, stackFile.sizeZ), + new FinalVoxelDimensions("um", rX, rY, rZ), + new Tile(imageIndex), + new Channel(chIndex), + new Angle(stackFile.getAng()), + new Illumination(stackFile.getIl()) + ); + viewToStackFileMap.put(viewIndex, stackFile); + ((Map) sequenceDescription.getViewSetups()).put(viewIndex, vs); + } } - return tiles; + } catch (Exception e) { + throw new IllegalStateException("Could not read " + stackFile, e); } + }); + return this; + } + } - formatReader.setId( file.toString() ); - - MetadataRetrieve retrieve = (MetadataRetrieve)formatReader.getMetadataStore(); - - this.nImages = retrieve.getImageCount(); - this.nTp = formatReader.getSizeT(); - this.nCh = formatReader.getSizeC(); - this.sizeZ = formatReader.getSizeZ(); - this.sizeY = formatReader.getSizeY(); - this.sizeX = formatReader.getSizeX(); - for (int imageIndex = 0; imageIndex < nImages; imageIndex++) { - Length z = retrieve.getPlanePositionZ(imageIndex, 0); - Length y = retrieve.getPlanePositionY(imageIndex, 0); - Length x = retrieve.getPlanePositionX(imageIndex, 0); - Length resX = retrieve.getPixelsPhysicalSizeX(imageIndex); - Length resY = retrieve.getPixelsPhysicalSizeY(imageIndex); - Length resZ = retrieve.getPixelsPhysicalSizeZ(imageIndex); - double zz = z != null ? z.value(UNITS.MICROMETER).doubleValue() : 0; - double yy = y != null ? y.value(UNITS.MICROMETER).doubleValue() : 0; - double xx = x != null ? x.value(UNITS.MICROMETER).doubleValue() : 0; - double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0; - double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0; - double rX = resX != null ? 
resX.value(UNITS.MICROMETER).doubleValue() : 0; - int imageChannels = retrieve.getChannelCount(imageIndex); - for (int chIndex = 0; chIndex < imageChannels; chIndex++) { - String chName = retrieve.getChannelName(imageIndex, chIndex); - tiles.add(new TileInfo( - (ti * nImages + imageIndex) * imageChannels + chIndex, - file, - viewIndex.tp, - ti, - imageIndex, - chIndex, chName, - viewIndex.ang, viewIndex.il, - sizeZ, sizeY, sizeX, - zz, yy, xx, - rZ, rY, rX)); + static class N5ViewSetupBuilder implements ViewSetupBuilder { + + private final SequenceDescription sequenceDescription; + private final URI n5ContainerURI; + private final Map viewIdToPath; + private final N5Reader n5Reader; + private final N5MultichannelLoader n5Loader; + + public N5ViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) { + this.sequenceDescription = new SequenceDescription( + timePoints, + /*view setups*/Collections.emptyList() + ); + this.n5ContainerURI = n5ContainerURI; + this.viewIdToPath = new HashMap<>(); + n5Reader = new N5FSReader(n5ContainerURI.toString()); + n5Loader = new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath ); + } + + @Override + public SequenceDescription getSequenceDescription() { + return sequenceDescription; + } + + @Override + public N5ViewSetupBuilder setImgLoader() { + sequenceDescription.setImgLoader(n5Loader); + return this; + } + + @SuppressWarnings("unchecked") + @Override + public N5ViewSetupBuilder createViewSetups(List stackFiles) { + for (int i = 0; i < stackFiles.size(); i++) { + StackFile stackFile = stackFiles.get(i); + if ( Files.notExists(stackFile.getFilePath()) ) + { + continue; + } + Map pixelResolutions = n5Reader.getAttribute(".", "pixelResolution", Map.class); + VoxelDimensions voxelDimensions; + if (pixelResolutions != null) { + double[] res = ((List) pixelResolutions.getOrDefault("dimensions", Arrays.asList(1., 1., 1.))) + .stream() + .mapToDouble(d -> d) + .toArray(); + String resUnits = (String) pixelResolutions.getOrDefault("unit", "voxel"); + voxelDimensions = new FinalVoxelDimensions(resUnits, res); + } else { + voxelDimensions = new FinalVoxelDimensions("voxel", 1., 1., 1.); + } + Dimensions size; + if (n5Reader.exists(stackFile.relativeFilePath + "/s0")) { + long[] dims = n5Reader.getDatasetAttributes(stackFile.relativeFilePath + "/s0").getDimensions(); + size = new FinalDimensions(dims[0], dims[1], dims[2]); + } else { + long[] dims = n5Reader.getDatasetAttributes(stackFile.relativeFilePath).getDimensions(); + if (dims != null) { + size = new FinalDimensions(dims[0], dims[1], dims[2]); + } else { + System.out.println("Could not find dimensions attribute for " + stackFile.relativeFilePath); + size = null; } } - return tiles; - } catch (Exception e) { - throw new IllegalStateException("Could not read " + file, e); + ViewSetup vs = new ViewSetup( + i, // in this case view index coincides with stack file index + stackFile.relativeFilePath, + size, + voxelDimensions, + new Tile(stackFile.getTi()), + new Channel(stackFile.getCh()), + new Angle(stackFile.getAng()), + new Illumination(stackFile.getIl()) + ); + ((Map) sequenceDescription.getViewSetups()).put(i, vs); + viewIdToPath.put(new ViewId(stackFile.getTp(), i), stackFile.relativeFilePath); } +// try { +// DatasetAttributes attributes = setupImgLoader.getAttributes( sequenceDescription.getTimePoints().getTimePointsOrdered().get(0).getId() ); +// vs.setSize( new FinalDimensions( attributes.getDimensions() ) ); +// vs.setVoxelSize( new FinalVoxelDimensions("um", 
+// attributes.getBlockSize()[0], +// attributes.getBlockSize()[1], +// attributes.getBlockSize().length > 2 ? attributes.getBlockSize()[2] : 1) ); +// } catch (N5Exception e) { +// throw new IllegalStateException("Could not read attributes for " + vs, e); +// } + + + return this; } + } static class StackPattern { @@ -313,8 +418,7 @@ public SpimDatasetBuilder(String fileNamePattern ) this.fileNamePattern = new StackPattern(fileNamePattern); } - public SpimData2 createDataset(String imageDir) { - Path imagePath = Paths.get(imageDir); + public SpimData2 createDataset(URI imagePath) { List stackFiles = getStackFiles(imagePath); // collect timepoints from stack files @@ -322,38 +426,10 @@ public SpimData2 createDataset(String imageDir) { .map(si -> new TimePoint(si.viewIndex.tp)) .collect(Collectors.toSet()); - // create view setups - Map viewSetups = stackFiles.stream() - .flatMap(sf -> sf.loadTileMetadata().stream()) - .map(tileInfo -> { - Dimensions size = new FinalDimensions(tileInfo.sizeX, tileInfo.sizeY, tileInfo.sizeZ); - VoxelDimensions voxelSize = new FinalVoxelDimensions("um", tileInfo.resX, tileInfo.resY, tileInfo.resZ); - return new Tuple2<>( - tileInfo, - new ViewSetup( - tileInfo.tileIndex, - tileInfo.chName, - size, - voxelSize, - new Tile(tileInfo.imageIndex), - new Channel(tileInfo.chIndex), - new Angle(tileInfo.angle), - new Illumination(tileInfo.illumination) - ) - ); - }) - .collect(Collectors.toMap(t -> t._1, t -> t._2)); - - SequenceDescription sequenceDescription = new SequenceDescription( - new TimePoints(timePoints), - viewSetups.values(), - /*image loader*/null, - null // missing views not handled for now - ); - - ImgLoader imgLoader = createImageLoader(imagePath, viewSetups.keySet(), sequenceDescription); - - sequenceDescription.setImgLoader(imgLoader); + SequenceDescription sequenceDescription = createViewSetupBuilder(imagePath, new TimePoints(timePoints)) + .createViewSetups(stackFiles) + .setImgLoader() + .getSequenceDescription(); // get the min resolution from all calibrations double minResolution = DatasetCreationUtils.minResolution( @@ -368,7 +444,7 @@ public SpimData2 createDataset(String imageDir) { ViewInterestPoints viewInterestPoints = new ViewInterestPoints(); return new SpimData2( - imagePath.toUri(), + imagePath, sequenceDescription, viewRegistrations, viewInterestPoints, @@ -379,29 +455,22 @@ public SpimData2 createDataset(String imageDir) { ); } - private ImgLoader createImageLoader(Path imagePath, Collection tileInfos, SequenceDescription sd) { - Map, FileMapEntry> fileMap = new HashMap<>(); - for (TileInfo ti : tileInfos) { - ViewDescription vdI = sd.getViewDescription( ti.tp, ti.tileIndex ); - fileMap.put( vdI, new FileMapEntry(ti.filePath.toFile(), ti.imageIndex, ti.chIndex) ); - } - - return new FileMapImgLoaderLOCI( - fileMap, - sd, - false - ); - } - - private List getStackFiles(Path imagePath) + /** + * So far only local paths are supported. 
+ * + * @param imageURI + * @return + */ + private List getStackFiles(URI imageURI) { int searchDepth = fileNamePattern.getSearchDepth(); try { + Path imagePath = Paths.get(imageURI); // get the files PathMatcher matcher = FileSystems.getDefault().getPathMatcher(fileNamePattern.getGlobPattern()); List fs = Files.walk( imagePath , searchDepth+1) .filter(path -> matcher.matches(imagePath.relativize(path))) - .map(p -> getStackFile(imagePath.relativize(p).toString(), p)) + .map(p -> getStackFile(imageURI, imagePath.relativize(p).toString())) .collect(Collectors.toList()); System.out.println(fs); return fs; @@ -410,18 +479,18 @@ private List getStackFiles(Path imagePath) } } - private StackFile getStackFile(String matchingPattern, Path filePath) + private StackFile getStackFile(URI imageURI, String imageRelativePath) { - Matcher m = fileNamePattern.regexPattern.matcher(matchingPattern); + Matcher m = fileNamePattern.regexPattern.matcher(imageRelativePath); if ( m.matches() ) { int tp = extractInt(fileNamePattern.hasKey("t") ? m.group("tp") : "0"); int ch = extractInt(fileNamePattern.hasKey("c") ? m.group("ch") : "0"); int il = extractInt(fileNamePattern.hasKey("i") ? m.group("il") : "0"); int ang = extractInt(fileNamePattern.hasKey("a") ? m.group("ang") : "0"); int ti = extractInt(fileNamePattern.hasKey("x") ? m.group("ti") : "0"); - return new StackFile(tp, ch, il, ang, ti, filePath); + return new StackFile(tp, ch, il, ang, ti, imageURI, imageRelativePath); } else { - throw new IllegalArgumentException(matchingPattern + " does not match " + fileNamePattern.sourcePattern + ". Refine the pattern and try again"); + throw new IllegalArgumentException(imageRelativePath + " does not match " + fileNamePattern.sourcePattern + ". Refine the pattern and try again"); } } @@ -433,4 +502,12 @@ int extractInt(String input) { return 0; } } + + private ViewSetupBuilder createViewSetupBuilder(URI imageURI, TimePoints timePoints) { + if ( imageURI.getScheme().equals("n5") || imageURI.getScheme().equals("file") && imageURI.getPath().contains(".n5") ) { + return new N5ViewSetupBuilder(imageURI, timePoints); + } else { + return new LOCIViewSetupBuilder(timePoints); + } + } } diff --git a/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java new file mode 100644 index 00000000..1c07745c --- /dev/null +++ b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java @@ -0,0 +1,63 @@ +package net.preibisch.mvrecon.dataset; + +import java.io.File; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + +import mpicbg.spim.data.XmlHelpers; +import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; +import mpicbg.spim.data.generic.sequence.ImgLoaderIo; +import mpicbg.spim.data.generic.sequence.XmlIoBasicImgLoader; +import mpicbg.spim.data.sequence.ViewId; +import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader; +import org.janelia.saalfeldlab.n5.universe.StorageFormat; +import org.jdom2.Element; + +import static mpicbg.spim.data.XmlKeys.IMGLOADER_FORMAT_ATTRIBUTE_NAME; + +@ImgLoaderIo( format = "bdv.multchimg.n5", type = N5MultichannelLoader.class ) +public class XmlToN5MultichannelLoader implements XmlIoBasicImgLoader { + @Override + public Element toXml(N5MultichannelLoader imgLoader, File basePath) { + final Element imgLoaderElement = new Element( "ImageLoader" ); + imgLoaderElement.setAttribute( IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.multchimg.n5" ); + 
imgLoaderElement.setAttribute( "version", "1.0" ); + + imgLoaderElement.addContent( XmlHelpers.pathElementURI( "n5", imgLoader.getN5URI(), basePath.toURI() )); + + final Element zgroupsElement = new Element( "n5groups" ); + + for ( final Map.Entry entry : imgLoader.getViewIdToPath().entrySet() ) + { + final Element n5groupElement = new Element("n5group"); + n5groupElement.setAttribute( "setup", String.valueOf( entry.getKey().getViewSetupId() ) ); + n5groupElement.setAttribute( "tp", String.valueOf( entry.getKey().getTimePointId() ) ); + n5groupElement.setAttribute( "path", String.valueOf( entry.getValue() ) ); + + zgroupsElement.addContent( n5groupElement ); + } + + imgLoaderElement.addContent( zgroupsElement ); + + return imgLoaderElement; + } + + @Override + public N5MultichannelLoader fromXml(Element elem, File basePath, AbstractSequenceDescription sequenceDescription) { + final Map n5groups = new HashMap<>(); + + URI uri = XmlHelpers.loadPathURI( elem, "n5", basePath.toURI() ); + + final Element n5groupsElem = elem.getChild( "n5groups" ); + for ( final Element c : n5groupsElem.getChildren( "n5group" ) ) + { + final int timepointId = Integer.parseInt( c.getAttributeValue( "tp" ) ); + final int setupId = Integer.parseInt( c.getAttributeValue( "setup" ) ); + final String path = c.getAttributeValue( "path" ); + n5groups.put( new ViewId( timepointId, setupId ), path ); + } + + return new N5MultichannelLoader(uri, StorageFormat.N5, sequenceDescription, n5groups); + } +} From b7df0610785b432ebb3260c3f3b8b21034e25def Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Fri, 19 Sep 2025 20:09:04 -0400 Subject: [PATCH 12/21] fixes for tile index --- .../bigstitcher/spark/CreateDataset.java | 20 +++-- .../spark/SparkPairwiseStitching.java | 10 ++- .../dataset/N5MultichannelProperties.java | 50 ++++++++++- .../mvrecon/dataset/SpimDatasetBuilder.java | 86 +++++++++---------- 4 files changed, 111 insertions(+), 55 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java index 79dd0d77..9f1efef9 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java @@ -35,6 +35,18 @@ public Void call() throws Exception { URI xmlURI = URITools.toURI(xmlURIString); System.out.println("Save spimData with original tiles to " + xmlURI); + prepareSaveLocation(xmlURI); + new XmlIoSpimData2().save(spimData, xmlURI); + + return null; + } + + private SpimData2 createDataset() { + SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern); + return spimDatasetBuilder.createDataset(URITools.toURI(inputPath)); + } + + private void prepareSaveLocation(URI xmlURI) { if (URITools.isFile( xmlURI )) { Path xmlPath = Paths.get(xmlURI); // create parent directories if necessary @@ -46,14 +58,6 @@ public Void call() throws Exception { } } } - new XmlIoSpimData2().save(spimData, xmlURI); - - return null; - } - - private SpimData2 createDataset() { - SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern); - return spimDatasetBuilder.createDataset(URITools.toURI(inputPath)); } public static void main(final String... 
args) throws SpimDataException { diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java index 0a0b8162..68dcbed6 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java @@ -30,6 +30,8 @@ import java.util.concurrent.Executors; import java.util.stream.Collectors; +import mpicbg.spim.data.registration.ViewTransform; +import mpicbg.spim.data.registration.ViewTransformAffine; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; @@ -331,13 +333,19 @@ public Void call() throws Exception System.out.println( new Date( System.currentTimeMillis() ) + ": Remaining pairs: " + results.size() ); - // update StitchingResults with Results for ( final PairwiseStitchingResult< ViewId > psr : results ) { if (psr == null) continue; + // update the registrations transformations + psr.pair().getA().getViews().forEach( viewId -> { + dataGlobal.getViewRegistrations().getViewRegistration(viewId) + .preconcatenateTransform(new ViewTransformAffine( + "Stitching Transform", + new AffineTransform3D().concatenate(psr.getInverseTransform()))); + }); dataGlobal.getStitchingResults().setPairwiseResultForPair(psr.pair(), psr ); } diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java index a39f1efb..2556741c 100644 --- a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java +++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java @@ -22,7 +22,9 @@ */ package net.preibisch.mvrecon.dataset; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import java.util.Map; import bdv.img.n5.N5Properties; @@ -37,6 +39,7 @@ public class N5MultichannelProperties implements N5Properties private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription; private final Map< ViewId, String > viewIdToPath; + private int numMipmapLevels; public N5MultichannelProperties( final AbstractSequenceDescription< ?, ?, ? 
> sequenceDescription, @@ -44,6 +47,7 @@ public N5MultichannelProperties( { this.sequenceDescription = sequenceDescription; this.viewIdToPath = viewIdToPath; + this.numMipmapLevels = -1; } private String getPath( final int setupId, final int timepointId ) @@ -60,7 +64,7 @@ public String getDatasetPath( final int setupId, final int timepointId, final in @Override public DataType getDataType( final N5Reader n5, final int setupId ) { - return getDataType( this, n5, setupId ); + return N5MultichannelProperties.getDataType( this, n5, setupId ); } @Override @@ -77,6 +81,33 @@ public long[] getDimensions( final N5Reader n5, final int setupId, final int tim return Arrays.copyOf( dimensions, 3 ); } + public < T > T getRootAttribute( N5Reader n5, String attributeKey, Class< T > attributeType ) + { + return n5.getAttribute("", attributeKey, attributeType); + } + + public < T > T getAttribute( N5Reader n5, int setupId, int timepointId, int level, String attributeKey, Class< T > attributeType ) + { + String path; + if (level >= 0) { + path = getDatasetPath( setupId, timepointId, level ); + } else { + path = getPath( setupId, timepointId ); + } + return n5.getAttribute(path, attributeKey, attributeType); + } + + private int getNumMipmapLevels( final N5Reader n5, final int setupId, final int timepointId ) + { + if ( numMipmapLevels >= 0 ) + return numMipmapLevels; + + final String path = getPath( setupId, timepointId ); + String[] subgroups = n5.list(path); + numMipmapLevels = subgroups != null ? subgroups.length : 0; + return numMipmapLevels; + } + // // static methods // @@ -103,7 +134,20 @@ private static double[][] getMipMapResolutions(final N5MultichannelProperties n5 final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId ); // read scales and pixelResolution attributes from the base container and build the mipmap resolutions from that - - return new double[][] { { 1, 1, 1 } }; // default + List< double[] > scales = new ArrayList<>(); + int numLevels = n5properties.getNumMipmapLevels(n5, setupId, timePointId); + for (int level = 0; level < numLevels; level++ ) { + double[] pixelResolution = n5properties.getAttribute(n5, setupId, timePointId, level, "pixelResolution", double[].class); + double[] downSamplingFactors = n5properties.getAttribute(n5, setupId, timePointId, level, "downsamplingFactors", double[].class); + if (pixelResolution != null) { + if (downSamplingFactors != null) { + for (int d = 0; d < pixelResolution.length && d < downSamplingFactors.length; d++) { + pixelResolution[d] *= downSamplingFactors[d]; + } + } + scales.add(pixelResolution); + } + } + return !scales.isEmpty() ? 
scales.toArray( new double[0][]) : new double[][] { { 1, 1, 1 } }; } } diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java index b30ed9ab..add85af4 100644 --- a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java +++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java @@ -53,7 +53,6 @@ import ome.units.UNITS; import ome.units.quantity.Length; import org.apache.commons.lang3.builder.ToStringBuilder; -import org.janelia.saalfeldlab.n5.DatasetAttributes; import org.janelia.saalfeldlab.n5.N5FSReader; import org.janelia.saalfeldlab.n5.N5Reader; import org.janelia.saalfeldlab.n5.universe.StorageFormat; @@ -193,7 +192,11 @@ public LOCIViewSetupBuilder setImgLoader() { for (ViewSetup vs : sequenceDescription.getViewSetupsOrdered()) { StackFile stackFile = viewToStackFileMap.get(vs.getId()); ViewDescription vdI = sequenceDescription.getViewDescription( stackFile.getTp(), vs.getId() ); - fileMap.put( vdI, new FileMapEntry(stackFile.getFilePath().toFile(), vs.getTile().getId(), vs.getChannel().getId()) ); + FileMapEntry fileMapEntry = new FileMapEntry( + stackFile.getFilePath().toFile(), + vs.getTile().getId() - (stackFile.getTi() * stackFile.nImages), // recreate the image index within the file + vs.getChannel().getId()); + fileMap.put( vdI, fileMapEntry); } sequenceDescription.setImgLoader(new FileMapImgLoaderLOCI( @@ -207,11 +210,13 @@ public LOCIViewSetupBuilder setImgLoader() { @SuppressWarnings("unchecked") @Override public LOCIViewSetupBuilder createViewSetups(List stackFiles) { - stackFiles.forEach( stackFile -> { + int nfiles = stackFiles.size(); + for (int sfi = 0; sfi < nfiles; sfi++) { + StackFile stackFile = stackFiles.get(sfi); File tileFile = stackFile.getFilePath().toFile(); if ( !tileFile.exists() ) { - return; + continue; } IFormatReader formatReader = new ChannelSeparator(); try { @@ -221,7 +226,7 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { } catch (IOException e) { e.printStackTrace(); } - return; + continue; } formatReader.setId( tileFile.toString() ); @@ -235,23 +240,37 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { stackFile.sizeY = formatReader.getSizeY(); stackFile.sizeX = formatReader.getSizeX(); for (int imageIndex = 0; imageIndex < stackFile.nImages; imageIndex++) { + Length offsetX = retrieve.getPlanePositionX(imageIndex, 0); + Length offsetY = retrieve.getPlanePositionY(imageIndex, 0); + Length offsetZ = retrieve.getPlanePositionZ(imageIndex, 0); Length resX = retrieve.getPixelsPhysicalSizeX(imageIndex); Length resY = retrieve.getPixelsPhysicalSizeY(imageIndex); Length resZ = retrieve.getPixelsPhysicalSizeZ(imageIndex); - double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0; - double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0; + + double oX = offsetX != null ? offsetX.value(UNITS.MICROMETER).doubleValue() : 0; + double oY = offsetY != null ? offsetY.value(UNITS.MICROMETER).doubleValue() : 0; + double oZ = offsetZ != null ? offsetZ.value(UNITS.MICROMETER).doubleValue() : 0; double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 0; + double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0; + double rZ = resZ != null ? 
resZ.value(UNITS.MICROMETER).doubleValue() : 0; + int imageChannels = retrieve.getChannelCount(imageIndex); for (int chIndex = 0; chIndex < imageChannels; chIndex++) { String chName = retrieve.getChannelName(imageIndex, chIndex); - int viewIndex = (stackFile.getTi() * stackFile.nImages + imageIndex) * imageChannels + chIndex; + // currently viewIndex is only based on the number of images and channels + // but a correct implementation would also consider timepoints, illuminations and angles + // for now I am ignoring them because so far we never needed them. + int viewIndex = chIndex * nfiles * stackFile.nImages + sfi * stackFile.nImages + imageIndex; + Tile tile = new Tile(stackFile.getTi() * stackFile.nImages + imageIndex); + tile.setLocation(new double[]{oX, oY, oZ}); + Channel channel = new Channel(chIndex, chName); ViewSetup vs = new ViewSetup( viewIndex, - chName, + String.valueOf(viewIndex), new FinalDimensions(stackFile.sizeX, stackFile.sizeY, stackFile.sizeZ), new FinalVoxelDimensions("um", rX, rY, rZ), - new Tile(imageIndex), - new Channel(chIndex), + tile, + channel, new Angle(stackFile.getAng()), new Illumination(stackFile.getIl()) ); @@ -262,28 +281,28 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { } catch (Exception e) { throw new IllegalStateException("Could not read " + stackFile, e); } - }); + } return this; } } static class N5ViewSetupBuilder implements ViewSetupBuilder { - private final SequenceDescription sequenceDescription; private final URI n5ContainerURI; + private final SequenceDescription sequenceDescription; private final Map viewIdToPath; private final N5Reader n5Reader; - private final N5MultichannelLoader n5Loader; + private final N5MultichannelProperties n5MultichannelProperties; public N5ViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) { + this.n5ContainerURI = n5ContainerURI; this.sequenceDescription = new SequenceDescription( timePoints, /*view setups*/Collections.emptyList() ); - this.n5ContainerURI = n5ContainerURI; this.viewIdToPath = new HashMap<>(); n5Reader = new N5FSReader(n5ContainerURI.toString()); - n5Loader = new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath ); + n5MultichannelProperties = new N5MultichannelProperties(sequenceDescription, viewIdToPath); } @Override @@ -293,7 +312,9 @@ public SequenceDescription getSequenceDescription() { @Override public N5ViewSetupBuilder setImgLoader() { - sequenceDescription.setImgLoader(n5Loader); + sequenceDescription.setImgLoader( + new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath ) + ); return this; } @@ -306,7 +327,9 @@ public N5ViewSetupBuilder createViewSetups(List stackFiles) { { continue; } - Map pixelResolutions = n5Reader.getAttribute(".", "pixelResolution", Map.class); + viewIdToPath.put(new ViewId(stackFile.getTp(), i), stackFile.relativeFilePath); + + Map pixelResolutions = n5MultichannelProperties.getRootAttribute(n5Reader, "pixelResolution", Map.class); VoxelDimensions voxelDimensions; if (pixelResolutions != null) { double[] res = ((List) pixelResolutions.getOrDefault("dimensions", Arrays.asList(1., 1., 1.))) @@ -318,19 +341,8 @@ public N5ViewSetupBuilder createViewSetups(List stackFiles) { } else { voxelDimensions = new FinalVoxelDimensions("voxel", 1., 1., 1.); } - Dimensions size; - if (n5Reader.exists(stackFile.relativeFilePath + "/s0")) { - long[] dims = n5Reader.getDatasetAttributes(stackFile.relativeFilePath + "/s0").getDimensions(); - size = new 
FinalDimensions(dims[0], dims[1], dims[2]); - } else { - long[] dims = n5Reader.getDatasetAttributes(stackFile.relativeFilePath).getDimensions(); - if (dims != null) { - size = new FinalDimensions(dims[0], dims[1], dims[2]); - } else { - System.out.println("Could not find dimensions attribute for " + stackFile.relativeFilePath); - size = null; - } - } + long[] dims = n5MultichannelProperties.getDimensions(n5Reader, i, stackFile.getTp(), 0); + Dimensions size = new FinalDimensions(dims[0], dims[1], dims[2]); ViewSetup vs = new ViewSetup( i, // in this case view index coincides with stack file index stackFile.relativeFilePath, @@ -342,19 +354,7 @@ public N5ViewSetupBuilder createViewSetups(List stackFiles) { new Illumination(stackFile.getIl()) ); ((Map) sequenceDescription.getViewSetups()).put(i, vs); - viewIdToPath.put(new ViewId(stackFile.getTp(), i), stackFile.relativeFilePath); } -// try { -// DatasetAttributes attributes = setupImgLoader.getAttributes( sequenceDescription.getTimePoints().getTimePointsOrdered().get(0).getId() ); -// vs.setSize( new FinalDimensions( attributes.getDimensions() ) ); -// vs.setVoxelSize( new FinalVoxelDimensions("um", -// attributes.getBlockSize()[0], -// attributes.getBlockSize()[1], -// attributes.getBlockSize().length > 2 ? attributes.getBlockSize()[2] : 1) ); -// } catch (N5Exception e) { -// throw new IllegalStateException("Could not read attributes for " + vs, e); -// } - return this; } From 8bbb5b597c2ca6669df5eef87f95374c05ed5818 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Sat, 20 Sep 2025 19:20:17 -0400 Subject: [PATCH 13/21] removed Takashi's code --- .../bigstitcher/spark/SparkAffineFusion.java | 15 ++------------- .../mvrecon/dataset/SpimDatasetBuilder.java | 11 ++++++----- 2 files changed, 8 insertions(+), 18 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index 961521ce..c11eb7b4 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -288,10 +288,6 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class ); - final long[] orig_bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class ); - if ( !Double.isNaN( anisotropyFactor ) ) - orig_bbMin[ 2 ] = Math.round( Math.floor( orig_bbMin[ 2 ] * anisotropyFactor ) ); - System.out.println( "FusionFormat: " + fusionFormat ); System.out.println( "FusionType: " + fusionType ); System.out.println( "Input XML: " + xmlURI ); @@ -499,13 +495,6 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN { final SpimData2 dataLocal = Spark.getSparkJobSpimData2(xmlURI); - final HashMap< ViewId, AffineTransform3D > orig_registrations = - TransformVirtual.adjustAllTransforms( - viewIds, - dataLocal.getViewRegistrations().getViewRegistrations(), - Double.NaN, - Double.NaN ); - final HashMap< ViewId, AffineTransform3D > registrations = TransformVirtual.adjustAllTransforms( viewIds, @@ -542,7 +531,7 @@ else if ( dataType == DataType.UINT16 ) // The min coordinates of the block that this job renders (in pixels) final int n = gridBlock[ 0 ].length; final long[] superBlockOffset = new long[ n ]; - Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + 
orig_bbMin[ d ] ); + Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + bbMin[ d ] ); // The size of the block that this job renders (in pixels) final long[] superBlockSize = gridBlock[ 1 ]; @@ -557,7 +546,7 @@ else if ( dataType == DataType.UINT16 ) Arrays.setAll( fusedBlockMax, d -> superBlockOffset[ d ] + superBlockSize[ d ] - 1 ); final List< ViewId > overlappingViews = - OverlappingViews.findOverlappingViews( dataLocal, viewIds, orig_registrations, fusedBlock ); + OverlappingViews.findOverlappingViews( dataLocal, viewIds, registrations, fusedBlock ); if ( overlappingViews.size() == 0 ) return gridBlock; diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java index add85af4..82f8f3ad 100644 --- a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java +++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java @@ -286,7 +286,7 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { } } - static class N5ViewSetupBuilder implements ViewSetupBuilder { + static class N5MultichannelViewSetupBuilder implements ViewSetupBuilder { private final URI n5ContainerURI; private final SequenceDescription sequenceDescription; @@ -294,7 +294,7 @@ static class N5ViewSetupBuilder implements ViewSetupBuilder { private final N5Reader n5Reader; private final N5MultichannelProperties n5MultichannelProperties; - public N5ViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) { + public N5MultichannelViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) { this.n5ContainerURI = n5ContainerURI; this.sequenceDescription = new SequenceDescription( timePoints, @@ -311,7 +311,7 @@ public SequenceDescription getSequenceDescription() { } @Override - public N5ViewSetupBuilder setImgLoader() { + public N5MultichannelViewSetupBuilder setImgLoader() { sequenceDescription.setImgLoader( new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath ) ); @@ -320,7 +320,7 @@ public N5ViewSetupBuilder setImgLoader() { @SuppressWarnings("unchecked") @Override - public N5ViewSetupBuilder createViewSetups(List stackFiles) { + public N5MultichannelViewSetupBuilder createViewSetups(List stackFiles) { for (int i = 0; i < stackFiles.size(); i++) { StackFile stackFile = stackFiles.get(i); if ( Files.notExists(stackFile.getFilePath()) ) @@ -441,6 +441,7 @@ public SpimData2 createDataset(URI imagePath) { sequenceDescription.getViewDescriptions(), minResolution ); + ViewInterestPoints viewInterestPoints = new ViewInterestPoints(); return new SpimData2( @@ -505,7 +506,7 @@ int extractInt(String input) { private ViewSetupBuilder createViewSetupBuilder(URI imageURI, TimePoints timePoints) { if ( imageURI.getScheme().equals("n5") || imageURI.getScheme().equals("file") && imageURI.getPath().contains(".n5") ) { - return new N5ViewSetupBuilder(imageURI, timePoints); + return new N5MultichannelViewSetupBuilder(imageURI, timePoints); } else { return new LOCIViewSetupBuilder(timePoints); } From c1f88e1874e15853d6ade3e7b267bf5f013739ac Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Sat, 20 Sep 2025 21:01:51 -0400 Subject: [PATCH 14/21] print voxel dimensions --- .../net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java index 82f8f3ad..68f30680 100644 --- 
a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java +++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java @@ -253,7 +253,8 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 0; double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0; double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0; - + VoxelDimensions voxelDimensions = new FinalVoxelDimensions("um", rX, rY, rZ); + System.out.println("Voxel dimensions: " + voxelDimensions); int imageChannels = retrieve.getChannelCount(imageIndex); for (int chIndex = 0; chIndex < imageChannels; chIndex++) { String chName = retrieve.getChannelName(imageIndex, chIndex); @@ -268,7 +269,7 @@ public LOCIViewSetupBuilder createViewSetups(List stackFiles) { viewIndex, String.valueOf(viewIndex), new FinalDimensions(stackFile.sizeX, stackFile.sizeY, stackFile.sizeZ), - new FinalVoxelDimensions("um", rX, rY, rZ), + voxelDimensions, tile, channel, new Angle(stackFile.getAng()), @@ -341,6 +342,7 @@ public N5MultichannelViewSetupBuilder createViewSetups(List stackFile } else { voxelDimensions = new FinalVoxelDimensions("voxel", 1., 1., 1.); } + System.out.println("Voxel dimensions: " + voxelDimensions); long[] dims = n5MultichannelProperties.getDimensions(n5Reader, i, stackFile.getTp(), 0); Dimensions size = new FinalDimensions(dims[0], dims[1], dims[2]); ViewSetup vs = new ViewSetup( From 530b006768ff2e9d1f04d5244086a752af0ca0c7 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Mon, 22 Sep 2025 09:28:57 -0400 Subject: [PATCH 15/21] try again Takashi's fix --- pom.xml | 2 +- .../bigstitcher/spark/SparkAffineFusion.java | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index f34a7f3a..75aa9c16 100644 --- a/pom.xml +++ b/pom.xml @@ -100,7 +100,7 @@ 0.18.0 10.6.4 2.3.5 - 7.0.5 + 7.0.6 2.5.0 2.3.0 diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index c11eb7b4..961521ce 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -288,6 +288,10 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class ); + final long[] orig_bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class ); + if ( !Double.isNaN( anisotropyFactor ) ) + orig_bbMin[ 2 ] = Math.round( Math.floor( orig_bbMin[ 2 ] * anisotropyFactor ) ); + System.out.println( "FusionFormat: " + fusionFormat ); System.out.println( "FusionType: " + fusionType ); System.out.println( "Input XML: " + xmlURI ); @@ -495,6 +499,13 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN { final SpimData2 dataLocal = Spark.getSparkJobSpimData2(xmlURI); + final HashMap< ViewId, AffineTransform3D > orig_registrations = + TransformVirtual.adjustAllTransforms( + viewIds, + dataLocal.getViewRegistrations().getViewRegistrations(), + Double.NaN, + Double.NaN ); + final HashMap< ViewId, AffineTransform3D > registrations = TransformVirtual.adjustAllTransforms( viewIds, @@ -531,7 +542,7 @@ else if ( dataType == DataType.UINT16 ) // The min coordinates of the block that this job renders 
(in pixels) final int n = gridBlock[ 0 ].length; final long[] superBlockOffset = new long[ n ]; - Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + bbMin[ d ] ); + Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + orig_bbMin[ d ] ); // The size of the block that this job renders (in pixels) final long[] superBlockSize = gridBlock[ 1 ]; @@ -546,7 +557,7 @@ else if ( dataType == DataType.UINT16 ) Arrays.setAll( fusedBlockMax, d -> superBlockOffset[ d ] + superBlockSize[ d ] - 1 ); final List< ViewId > overlappingViews = - OverlappingViews.findOverlappingViews( dataLocal, viewIds, registrations, fusedBlock ); + OverlappingViews.findOverlappingViews( dataLocal, viewIds, orig_registrations, fusedBlock ); if ( overlappingViews.size() == 0 ) return gridBlock; From 00bdcee745e07903a9cfe28b0c333db3ff179e23 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Mon, 22 Sep 2025 09:44:05 -0400 Subject: [PATCH 16/21] downgraded multiview-reconstruction --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 75aa9c16..f34a7f3a 100644 --- a/pom.xml +++ b/pom.xml @@ -100,7 +100,7 @@ 0.18.0 10.6.4 2.3.5 - 7.0.6 + 7.0.5 2.5.0 2.3.0 From 79f2b90bb39b4c76b5a61488b07b515ac51e2457 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Sat, 27 Sep 2025 15:41:07 -0400 Subject: [PATCH 17/21] merges from main --- .../bigstitcher/spark/SparkAffineFusion.java | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index 1fd9c7a3..ae0f1f1e 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -288,10 +288,6 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class ); - final long[] orig_bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class ); - if ( !Double.isNaN( anisotropyFactor ) ) - orig_bbMin[ 2 ] = Math.round( Math.floor( orig_bbMin[ 2 ] * anisotropyFactor ) ); - System.out.println( "FusionFormat: " + fusionFormat ); System.out.println( "FusionType: " + fusionType ); System.out.println( "Input XML: " + xmlURI ); @@ -499,13 +495,6 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN { final SpimData2 dataLocal = Spark.getSparkJobSpimData2(xmlURI); - final HashMap< ViewId, AffineTransform3D > orig_registrations = - TransformVirtual.adjustAllTransforms( - viewIds, - dataLocal.getViewRegistrations().getViewRegistrations(), - Double.NaN, - Double.NaN ); - final HashMap< ViewId, AffineTransform3D > registrations = TransformVirtual.adjustAllTransforms( viewIds, @@ -542,7 +531,7 @@ else if ( dataType == DataType.UINT16 ) // The min coordinates of the block that this job renders (in pixels) final int n = gridBlock[ 0 ].length; final long[] superBlockOffset = new long[ n ]; - Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + orig_bbMin[ d ] ); + Arrays.setAll( superBlockOffset, d -> gridBlock[ 0 ][ d ] + bbMin[ d ] ); // The size of the block that this job renders (in pixels) final long[] superBlockSize = gridBlock[ 1 ]; @@ -557,7 +546,7 @@ else if ( dataType == DataType.UINT16 ) Arrays.setAll( fusedBlockMax, d -> superBlockOffset[ d ] + 
superBlockSize[ d ] - 1 ); final List< ViewId > overlappingViews = - OverlappingViews.findOverlappingViews( dataLocal, viewIds, orig_registrations, fusedBlock ); + OverlappingViews.findOverlappingViews( dataLocal, viewIds, registrations, fusedBlock ); if ( overlappingViews.size() == 0 ) return gridBlock; From 210c86a789c80a6230f9e2dc0f2e8dce538e9d12 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Tue, 28 Oct 2025 16:14:42 -0400 Subject: [PATCH 18/21] use multiview reconstruction 8.0.1-snapshot to test OME scale values --- pom.xml | 2 +- .../bigstitcher/spark/CreateFusionContainer.java | 8 +++----- .../preibisch/bigstitcher/spark/SparkAffineFusion.java | 6 ------ .../net/preibisch/bigstitcher/spark/SparkResaveN5.java | 2 +- 4 files changed, 5 insertions(+), 13 deletions(-) diff --git a/pom.xml b/pom.xml index 8e2e35b4..58e66549 100644 --- a/pom.xml +++ b/pom.xml @@ -103,7 +103,7 @@ 1.0.2 2.3.5 - 8.0.0 + 8.0.1-SNAPSHOT 2.5.0 2.3.0 diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java index e53fbe9e..f3749266 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java @@ -377,10 +377,8 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) (level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() ); // extract the resolution of the s0 export - // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied - // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working - final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN ); + final VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); @@ -427,7 +425,7 @@ else if ( storageType == StorageFormat.ZARR ) // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN ); + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "m" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index 4ea40fc1..98f547aa 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -153,7 +153,6 @@ public enum DataTypeFusion 
@Option(names = { "--prefetch" }, description = "prefetch all blocks required for fusion in each Spark job using unlimited threads, useful in cloud environments (default: false)") protected boolean prefetch = false; - // TODO: add support for loading coefficients during fusion @CommandLine.Option(names = { "--intensityN5Path" }, description = "N5/ZARR/HDF5 base path for loading coefficients (e.g. s3://myBucket/coefficients.n5)") private String intensityN5PathURIString = null; @@ -553,11 +552,7 @@ else if ( dataType == DataType.UINT16 ) return gridBlock; // load intensity correction coefficients for all overlapping views - - final Map< ViewId, Coefficients > coefficients; - - if ( intensityN5PathURI != null ) { coefficients = new HashMap<>(); @@ -611,7 +606,6 @@ else if ( dataType == DataType.UINT16 ) System.out.println( "Fusing block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) ); // returns a zero-min interval - //blockSupplier = BlkAffineFusion.init( blockSupplier = BlkAffineFusion.initWithIntensityCoefficients( conv, dataLocal.getSequenceDescription().getImgLoader(), diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java index fdbf0377..2244a11c 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java @@ -220,7 +220,7 @@ public Void call() throws Exception viewId, dataTypes.get( viewId.getViewSetupId() ), dimensions.get( viewId.getViewSetupId() ), - //dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), // TODO: this is a hack for now + dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), compression, blockSize, downsamplings); From e1f2d268b6708a5e142cc864c94fa80a0af01852 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Tue, 28 Oct 2025 19:06:58 -0400 Subject: [PATCH 19/21] changes based on multiview reconstruction scale fixes --- pom.xml | 2 +- .../bigstitcher/spark/CreateFusionContainer.java | 12 +++++------- .../bigstitcher/spark/SparkAffineFusion.java | 7 ------- .../preibisch/bigstitcher/spark/SparkResaveN5.java | 2 +- 4 files changed, 7 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 8e2e35b4..58e66549 100644 --- a/pom.xml +++ b/pom.xml @@ -103,7 +103,7 @@ 1.0.2 2.3.5 - 8.0.0 + 8.0.1-SNAPSHOT 2.5.0 2.3.0 diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java index f83178de..22331983 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java @@ -59,7 +59,7 @@ public class CreateFusionContainer extends AbstractBasic implements Callable coefficients; - - if ( intensityN5PathURI != null ) { coefficients = new HashMap<>(); @@ -598,7 +592,6 @@ else if ( dataType == DataType.UINT16 ) System.out.println( "Fusing block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) ); // returns a zero-min interval - //blockSupplier = BlkAffineFusion.init( blockSupplier = BlkAffineFusion.initWithIntensityCoefficients( conv, dataLocal.getSequenceDescription().getImgLoader(), diff --git 
a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java index 45c64233..c708b8f8 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java @@ -248,7 +248,7 @@ public Void call() throws Exception viewId, dataTypes.get( viewId.getViewSetupId() ), dimensions.get( viewId.getViewSetupId() ), - //dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), // TODO: this is a hack for now + dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), compression, blockSize, downsamplings); From fa6bd3bd0b1e6dd16183656685bd2ee886a4be2a Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Fri, 31 Oct 2025 11:09:38 -0400 Subject: [PATCH 20/21] use the average calibration --- .../spark/CreateFusionContainer.java | 60 ++++++++++++++----- .../bigstitcher/spark/SparkAffineFusion.java | 14 +---- .../bigstitcher/spark/SparkResaveN5.java | 9 +-- 3 files changed, 53 insertions(+), 30 deletions(-) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java index f3749266..a4930d8f 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java @@ -6,12 +6,21 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; import java.util.function.Function; +import mpicbg.spim.data.generic.base.Entity; +import mpicbg.spim.data.registration.ViewRegistrations; +import mpicbg.spim.data.sequence.SequenceDescription; +import net.imglib2.util.Pair; +import net.imglib2.util.ValuePair; +import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import org.janelia.saalfeldlab.n5.Compression; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.N5Writer; @@ -117,7 +126,10 @@ public class CreateFusionContainer extends AbstractBasic implements Callable levelToMipmapTransform = (level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() ); + updateAnisotropyAndCalibration(dataGlobal, viewIdsGlobal); // extract the resolution of the s0 export - final VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN ); - System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); + System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + calUnit ); // create metadata final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata( 5, // int n getContainerGroupPath(), // String name, I also saw "/" resolutionS0, // double[] resolutionS0, - "micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer + calUnit, //vx.unit() might 
not be OME-ZARR compatible // String unitXYZ, // e.g micrometer mrInfos[ 0 ].length, // int numResolutionLevels, (level) -> "/" + level, // OME-ZARR metadata will be created relative to the provided group levelToMipmapTransform ); // save metadata - - //org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata - // for this to work you need to register an adapter in the N5Factory class - // final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() ); driverVolumeWriter.setAttribute( getContainerGroupPath(), "multiscales", meta ); } if ( bdv ) { System.out.println( "Creating BDV compatible container at '" + outPathURI + "' ... " ); - if ( storageType == StorageFormat.N5 ) driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" ); else if ( storageType == StorageFormat.ZARR ) @@ -422,14 +429,12 @@ else if ( storageType == StorageFormat.ZARR ) tps.add( new TimePoint( t ) ); // extract the resolution of the s0 export - // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied - // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working - final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); + + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN ); System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "m" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); - final VoxelDimensions vxNew = new FinalVoxelDimensions( "micrometer", resolutionS0 ); + final VoxelDimensions vxNew = new FinalVoxelDimensions( calUnit, resolutionS0 ); for ( int c = 0; c < numChannels; ++c ) { @@ -535,6 +540,31 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 ) return null; } + private void updateAnisotropyAndCalibration( SpimData2 dataGlobal, List viewIdsGlobal ) + { + ViewRegistrations registrations = dataGlobal.getViewRegistrations(); + // get all view descriptions + List vds = SpimData2.getAllViewDescriptionsSorted(dataGlobal, viewIdsGlobal); + // group by timepoint and channel + Set> groupingFactors = new HashSet<>(Arrays.asList(TimePoint.class, Channel.class)); + List> fusionGroups = Group.splitBy( vds, groupingFactors ); + Pair calAndUnit = fusionGroups.stream().findFirst() + .map(group -> TransformationTools.computeAverageCalibration(group, registrations)) + .orElse(new ValuePair<>(new double[]{ 1, 1, 1 }, "micrometer")); + cal = calAndUnit.getA(); + calUnit = calAndUnit.getB(); + + if (preserveAnisotropy) { + if (!Double.isNaN(this.anisotropyFactor)) { + avgAnisotropy = this.anisotropyFactor; + } else { + avgAnisotropy = TransformationTools.getAverageAnisotropyFactor(dataGlobal, viewIdsGlobal); + } + } else { + avgAnisotropy = Double.NaN; + } + } + public static void main(final String... 
args) throws SpimDataException { System.out.println(Arrays.toString(args)); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java index 98f547aa..7460f0df 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java @@ -33,7 +33,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.stream.Collectors; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaRDD; @@ -767,18 +766,18 @@ else if ( dataType == DataType.UINT16 ) rddDSResult.cache(); rddDSResult.count(); - + // extract all blocks that failed final Set failedBlocksSet = retryTrackerDS.processWithSpark( rddDSResult, grid ); - + // Use RetryTracker to handle retry counting and removal if (!retryTrackerDS.processFailures(failedBlocksSet)) { System.out.println( "Stopping." ); System.exit( 1 ); } - + // Update grid for next iteration with remaining failed blocks grid.clear(); grid.addAll(failedBlocksSet); @@ -792,13 +791,6 @@ else if ( dataType == DataType.UINT16 ) // close main writer (is shared over Spark-threads if it's HDF5, thus just closing it here) driverVolumeWriter.close(); - /* - if ( multiRes ) - System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset.substring( 0, n5Dataset.length() - 3) + "'" ); - else - System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset + "'" ); - */ - System.out.println( "done, took: " + (System.currentTimeMillis() - totalTime ) + " ms." ); sc.close(); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java index 2244a11c..d4531d78 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java @@ -34,6 +34,7 @@ import bdv.img.n5.N5ImageLoader; import mpicbg.spim.data.sequence.ViewId; +import mpicbg.spim.data.sequence.VoxelDimensions; import net.imglib2.util.Util; import net.imglib2.util.ValuePair; import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic; @@ -214,13 +215,14 @@ public Void call() throws Exception else { System.out.println( Arrays.toString( blockSize ) ); - + VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize(); mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR( n5Writer, viewId, dataTypes.get( viewId.getViewSetupId() ), dimensions.get( viewId.getViewSetupId() ), - dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), + vx.dimensionsAsDoubleArray(), + vx.unit(), compression, blockSize, downsamplings); @@ -331,8 +333,7 @@ public Void call() throws Exception } final JavaRDD rddsN = sc.parallelize(allBlocks, Math.min( Spark.maxPartitions, allBlocks.size() ) ); - - + final JavaRDD rdds0Result = rddsN.map( gridBlock -> { final N5Writer n5Lcl = URITools.instantiateN5Writer( useN5 ? 
StorageFormat.N5 : StorageFormat.ZARR, n5PathURI ); From 7daab2830d778667d110781ab7070219ca7f18b7 Mon Sep 17 00:00:00 2001 From: Cristian Goina Date: Fri, 12 Dec 2025 14:24:25 -0500 Subject: [PATCH 21/21] getting the package to compile --- pom.xml | 2 +- .../bigstitcher/spark/Compressions.java | 5 ++ .../spark/CreateFusionContainer.java | 75 ++++++++++++------- .../preibisch/bigstitcher/spark/Solver.java | 21 +++--- .../SparkGeometricDescriptorMatching.java | 15 ++-- .../bigstitcher/spark/SparkResaveN5.java | 13 ++-- .../bigstitcher/spark/SplitDatasets.java | 2 +- .../bigstitcher/spark/util/N5Util.java | 2 +- 8 files changed, 83 insertions(+), 52 deletions(-) create mode 100644 src/main/java/net/preibisch/bigstitcher/spark/Compressions.java diff --git a/pom.xml b/pom.xml index 58e66549..eb9096cb 100644 --- a/pom.xml +++ b/pom.xml @@ -103,7 +103,7 @@ 1.0.2 2.3.5 - 8.0.1-SNAPSHOT + 8.1.2-SNAPSHOT 2.5.0 2.3.0 diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java new file mode 100644 index 00000000..847912a1 --- /dev/null +++ b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java @@ -0,0 +1,5 @@ +package net.preibisch.bigstitcher.spark; + +public enum Compressions { + Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw +} diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java index 22331983..39242968 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java @@ -7,11 +7,18 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.Callable; import java.util.function.Function; +import mpicbg.spim.data.generic.base.Entity; +import mpicbg.spim.data.registration.ViewRegistrations; +import net.imglib2.util.Pair; +import net.imglib2.util.ValuePair; +import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import org.janelia.saalfeldlab.n5.Compression; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.N5Writer; @@ -59,8 +66,6 @@ public class CreateFusionContainer extends AbstractBasic implements Callable numTimepointsXML ) System.out.println( "Fusion target: " + boundingBox.getTitle() + ": " + Util.printInterval( boundingBox ) + " with blocksize " + Util.printCoordinates( blockSize ) ); // compression and data type - final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel ); + final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel ); - System.out.println( "Compression: " + this.compression ); + System.out.println( "Compression: " + this.compressionType ); System.out.println( "Compression level: " + ( compressionLevel == null ? 
"default" : compressionLevel ) ); final DataType dt; @@ -362,29 +370,23 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR ) final Function levelToMipmapTransform = (level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() ); + updateAnisotropyAndCalibration(dataGlobal, viewIdsGlobal); // extract the resolution of the s0 export - // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied - // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working - final VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN ); - System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); + System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + calUnit ); // create metadata final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata( 5, // int n "/", // String name, I also saw "/" resolutionS0, // double[] resolutionS0, - "micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer + calUnit, //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer mrInfos[ 0 ].length, // int numResolutionLevels, levelToName, levelToMipmapTransform ); // save metadata - - //org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata - // for this to work you need to register an adapter in the N5Factory class - // final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() ); driverVolumeWriter.setAttribute( "/", "multiscales", meta ); } @@ -410,12 +412,12 @@ else if ( storageType == StorageFormat.ZARR ) tps.add( new TimePoint( t ) ); // extract the resolution of the s0 export - final VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize(); - final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx ); + + final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN ); System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "m" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() ); - final VoxelDimensions vxNew = new FinalVoxelDimensions( "micrometer", resolutionS0 ); + final VoxelDimensions vxNew = new FinalVoxelDimensions( calUnit, resolutionS0 ); for ( int c = 0; c < numChannels; ++c ) { @@ -441,7 +443,7 @@ else if ( storageType == StorageFormat.ZARR ) for ( int t = 0; t < numTimepoints; ++t ) { final OMEZARREntry omeZarrEntry = new OMEZARREntry( - mrInfos[ 0 ][ 0 ].dataset.substring(0, mrInfos[ 0 ][ 0 ].dataset.lastIndexOf( "/" ) ), + mrInfos[ t ][ c ].dataset.substring(0, mrInfos[ t ][ c ].dataset.lastIndexOf( "/" ) ), new int[] { c, t } ); viewIdToPath.put( new ViewId( t, c ), omeZarrEntry ); @@ -521,16 +523,35 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 ) return null; } - public static void main(final String... 
args) throws SpimDataException + private void updateAnisotropyAndCalibration( SpimData2 dataGlobal, List viewIdsGlobal ) { + ViewRegistrations registrations = dataGlobal.getViewRegistrations(); + // get all view descriptions + List vds = SpimData2.getAllViewDescriptionsSorted(dataGlobal, viewIdsGlobal); + // group by timepoint and channel + Set> groupingFactors = new HashSet<>(Arrays.asList(TimePoint.class, Channel.class)); + List> fusionGroups = Group.splitBy( vds, groupingFactors ); + Pair calAndUnit = fusionGroups.stream().findFirst() + .map(group -> TransformationTools.computeAverageCalibration(group, registrations)) + .orElse(new ValuePair<>(new double[]{ 1, 1, 1 }, "micrometer")); + cal = calAndUnit.getA(); + calUnit = calAndUnit.getB(); + + if (preserveAnisotropy) { + if (!Double.isNaN(this.anisotropyFactor)) { + avgAnisotropy = this.anisotropyFactor; + } else { + avgAnisotropy = TransformationTools.getAverageAnisotropyFactor(dataGlobal, viewIdsGlobal); + } + } else { + avgAnisotropy = Double.NaN; + } + } - //final XmlIoSpimData io = new XmlIoSpimData(); - //final SpimData spimData = io.load( "/Users/preibischs/Documents/Microscopy/Stitching/Truman/standard/output/dataset.xml" ); - //BdvFunctions.show( spimData ); - //SimpleMultiThreading.threadHaltUnClean(); - + public static void main(final String... args) throws SpimDataException + { System.out.println(Arrays.toString(args)); System.exit(new CommandLine(new CreateFusionContainer()).execute(args)); } -} \ No newline at end of file +} diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java index b27594b2..e9a0f236 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java @@ -289,7 +289,7 @@ public Void call() throws Exception return null; } - final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false ); + final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false, false ); final Collection< Pair< Group< ViewId >, Group< ViewId > > > removedInconsistentPairs = new ArrayList<>(); final HashMap models; final Model model = createModelInstance(transformationModel, regularizationModel, regularizationLambda); @@ -299,7 +299,8 @@ public Void call() throws Exception final ConvergenceStrategy cs = new ConvergenceStrategy( maxError, maxIterations, maxPlateauwidth ); models = (HashMap)GlobalOpt.computeTiles( - (Model)(Object)model, + (Model)model, + globalOptParameters.preAlign, pmc, cs, fixedViewIds, @@ -308,7 +309,8 @@ public Void call() throws Exception else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE ) { models = (HashMap)GlobalOptIterative.computeTiles( - (Model)(Object)model, + (Model)model, + globalOptParameters.preAlign, pmc, new SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ), new MaxErrorLinkRemoval(), @@ -322,7 +324,8 @@ else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE ) globalOptParameters.relativeThreshold = globalOptParameters.absoluteThreshold = Double.MAX_VALUE; models = (HashMap)GlobalOptTwoRound.computeTiles( - (Model & Affine3D)(Object)model, + (Model & Affine3D)model, + globalOptParameters.preAlign, pmc, new 
SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ), // if it's simple, both will be Double.MAX new MaxErrorLinkRemoval(), @@ -512,18 +515,18 @@ public static InterestPointMatchCreator setupPointMatchesFromInterestPoints( pairResult.setLabelA( labelA ); pairResult.setLabelB( labelB ); - final List cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy(); + final Collection cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy(); //List cpB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( label ).getCorrespondingInterestPointsCopy(); - final List ipListA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy(); - final List ipListB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy(); + final Map ipMapA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy(); + final Map ipMapB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy(); for ( final CorrespondingInterestPoints p : cpA ) { if ( p.getCorrespodingLabel().equals( labelB ) && p.getCorrespondingViewId().equals( vB ) ) { - InterestPoint ipA = ipListA.get( p.getDetectionId() ); - InterestPoint ipB = ipListB.get( p.getCorrespondingDetectionId() ); + InterestPoint ipA = ipMapA.get( p.getDetectionId() ); + InterestPoint ipB = ipMapB.get( p.getCorrespondingDetectionId() ); // we need to copy the array because it might not be bijective // (some points in one list might correspond with the same point in the other list) diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java index ad4103cf..ea859837 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java @@ -24,6 +24,7 @@ import java.net.URI; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -283,7 +284,7 @@ else if ( ransacIterations == null ) } ); // load & transform all interest points - final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints = + final Map< ViewId, HashMap< String, Collection< InterestPoint > > > interestpoints = TransformationTools.getAllTransformedInterestPoints( views, data.getViewRegistrations().getViewRegistrations(), @@ -299,8 +300,8 @@ else if ( ransacIterations == null ) interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() ); System.out.println( Group.pvid( task.vA ) + " (" + task.labelA + ") <=> " + Group.pvid( task.vB ) + " (" + task.labelB + "): Remaining interest points for alignment: " ); - for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() ) - for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() ) + for ( final Entry< 
ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() ) + for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() ) System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() ); } @@ -390,7 +391,7 @@ else if ( ransacIterations == null ) } ); // load & transform all interest points - final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints = + final Map< ViewId, HashMap< String, Collection< InterestPoint >> > interestpoints = TransformationTools.getAllTransformedInterestPoints( views, data.getViewRegistrations().getViewRegistrations(), @@ -410,12 +411,12 @@ else if ( ransacIterations == null ) TransformationTools.filterForOverlappingInterestPoints( interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() ); System.out.println( task.vA + " (" + task.labelA + ") <=> " + task.vB + " (" + task.labelB + "): Remaining interest points for alignment: " ); - for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() ) - for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() ) + for ( final Entry< ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() ) + for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() ) System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() ); } - final Map< Group< ViewId >, HashMap< String, List< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>(); + final Map< Group< ViewId >, HashMap< String, Collection< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>(); final InterestPointGroupingMinDistance< ViewId > ipGrouping = new InterestPointGroupingMinDistance<>( interestPointMergeDistance, interestpoints ); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java index c708b8f8..effb44fa 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java @@ -43,9 +43,9 @@ import bdv.img.n5.N5ImageLoader; import mpicbg.spim.data.sequence.ViewId; +import mpicbg.spim.data.sequence.VoxelDimensions; import net.imglib2.util.Util; import net.imglib2.util.ValuePair; -import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions; import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic; import net.preibisch.bigstitcher.spark.util.Import; import net.preibisch.bigstitcher.spark.util.N5Util; @@ -96,7 +96,7 @@ public class SparkResaveN5 extends AbstractBasic implements Callable, Seri @Option(names = {"-c", "--compression"}, defaultValue = "Zstandard", showDefaultValue = CommandLine.Help.Visibility.ALWAYS, description = "Dataset compression") - private Compressions compression = null; + private Compressions compressionType = null; @Option(names = {"-cl", "--compressionLevel" }, description = "compression level, if supported by the codec (default: gzip 1, Zstandard 3, xz 6)") private Integer compressionLevel = null; @@ -164,7 +164,7 @@ public Void call() throws Exception } final URI n5PathURI = URITools.toURI( this.n5PathURIString == null ? 
URITools.appendName( URITools.getParentURI( xmlOutURI ), (useN5 ? "dataset.n5" : "dataset.ome.zarr") ) : n5PathURIString ); - final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel ); + final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel ); final int[] blockSize = Import.csvStringToIntArray(blockSizeString); final int[] blockScale = Import.csvStringToIntArray(blockScaleString); @@ -177,7 +177,7 @@ public Void call() throws Exception //final N5Writer n5 = new N5FSWriter(n5Path); final N5Writer n5Writer = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI ); - System.out.println( "Compression: " + this.compression ); + System.out.println( "Compression: " + this.compressionType ); System.out.println( "Compression level: " + ( compressionLevel == null ? "default" : compressionLevel ) ); System.out.println( "N5 block size=" + Util.printCoordinates( blockSize ) ); System.out.println( "Compute block size=" + Util.printCoordinates( computeBlockSize ) ); @@ -242,13 +242,14 @@ public Void call() throws Exception else { System.out.println( Arrays.toString( blockSize ) ); - + VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize(); mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR( n5Writer, viewId, dataTypes.get( viewId.getViewSetupId() ), dimensions.get( viewId.getViewSetupId() ), - dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), + vx.dimensionsAsDoubleArray(), + vx.unit(), compression, blockSize, downsamplings); diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java index 84893355..f2fff98b 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java @@ -41,7 +41,7 @@ public class SplitDatasets extends AbstractBasic private boolean disableOptimization = false; @Option(names = { "-fip", "--fakeInterestPoints" }, description = "add fake interest points to overlapping regions of split images/views") - private boolean fakeInterestPoints = false; + private Split_Views.InterestPointAdding fakeInterestPoints = Split_Views.InterestPointAdding.NONE; @Option(names = { "--fipDensity" }, description = "density of fake interest points; number of points per 100x100x100 px volume (default: 100.0)") private double fipDensity = 100.0; diff --git a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java index 7d5b52c4..a93a81f1 100644 --- a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java +++ b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java @@ -36,7 +36,7 @@ import org.janelia.saalfeldlab.n5.universe.StorageFormat; import org.janelia.scicomp.n5.zstandard.ZstandardCompression; -import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions; +import net.preibisch.bigstitcher.spark.Compressions; import net.preibisch.legacy.io.IOFunctions; import util.URITools;
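
For reference: the mipmap-resolution logic that PATCH 12 adds to N5MultichannelProperties.getMipMapResolutions() reduces to an element-wise product of each level's "pixelResolution" and "downsamplingFactors" attributes, falling back to { 1, 1, 1 } when nothing could be read. Below is a minimal, self-contained sketch of that computation outside the N5 API; the class name, helper signature, and sample pyramid values are invented for illustration — only the attribute semantics come from the diff above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MipmapScaleSketch
{
	// per level: absolute scale = pixelResolution multiplied element-wise by that
	// level's downsamplingFactors; levels without a pixelResolution are skipped
	static double[][] scalesFromAttributes( final List< double[] > pixelResolutions, final List< double[] > downsamplingFactors )
	{
		final List< double[] > scales = new ArrayList<>();
		for ( int level = 0; level < pixelResolutions.size(); ++level )
		{
			final double[] pixelResolution = pixelResolutions.get( level );
			if ( pixelResolution == null )
				continue;
			final double[] scale = pixelResolution.clone();
			final double[] ds = level < downsamplingFactors.size() ? downsamplingFactors.get( level ) : null;
			if ( ds != null )
				for ( int d = 0; d < scale.length && d < ds.length; ++d )
					scale[ d ] *= ds[ d ];
			scales.add( scale );
		}
		// same fallback as the patch: identity scale when no attributes were found
		return scales.isEmpty() ? new double[][] { { 1, 1, 1 } } : scales.toArray( new double[ 0 ][] );
	}

	public static void main( final String[] args )
	{
		// hypothetical 3-level pyramid: s0 at 0.23 x 0.23 x 1.0 um,
		// s1 downsampled 2x2x1, s2 downsampled 4x4x2
		final List< double[] > res = Arrays.asList( new double[] { 0.23, 0.23, 1.0 }, new double[] { 0.23, 0.23, 1.0 }, new double[] { 0.23, 0.23, 1.0 } );
		final List< double[] > ds = Arrays.asList( new double[] { 1, 1, 1 }, new double[] { 2, 2, 1 }, new double[] { 4, 4, 2 } );
		for ( final double[] s : scalesFromAttributes( res, ds ) )
			System.out.println( Arrays.toString( s ) ); // [0.23, 0.23, 1.0], then [0.46, 0.46, 1.0], then [0.92, 0.92, 2.0]
	}
}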