diff --git a/install b/install
index c5de74e..047c57d 100755
--- a/install
+++ b/install
@@ -129,6 +129,8 @@ install_command solve-intensities "net.preibisch.bigstitcher.spark.IntensitySolv
install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
+install_command create-dataset "net.preibisch.bigstitcher.spark.CreateDataset"
+install_command chain-commands "net.preibisch.bigstitcher.spark.ChainCommands"
echo 'Installing utils ...'
diff --git a/pom.xml b/pom.xml
index 8e2e35b..eb9096c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -103,7 +103,7 @@
1.0.2
2.3.5
- 8.0.0
+ 8.1.2-SNAPSHOT
2.5.0
2.3.0
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
new file mode 100644
index 0000000..62e21c9
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
@@ -0,0 +1,129 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Stack;
+import java.util.concurrent.Callable;
+
+import mpicbg.spim.data.SpimDataException;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure;
+import picocli.CommandLine;
+
+public class ChainCommands extends AbstractInfrastructure implements Callable<Void>, Serializable
+{
+ private static final long serialVersionUID = 1584686229152127469L;
+
+ static class CommandWithArguments {
+ final List<String> cmdArgs;
+
+ CommandWithArguments(List<String> cmdArgs) {
+ this.cmdArgs = cmdArgs;
+ }
+ }
+
+ /**
+ * Custom converter to collect all arguments after --command until a separator
+ * (either ';' or '+') or the end of the input.
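+ * <p>
+ * For example (hypothetical arguments), {@code --command resave -x data.xml ; --command affine-fusion -x data.xml}
+ * is collected into two entries: {@code [resave, -x, data.xml]} and {@code [affine-fusion, -x, data.xml]}.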
+ */
+ static class CommandArgsConverter implements CommandLine.IParameterConsumer {
+ @Override
+ public void consumeParameters(Stack<String> args, CommandLine.Model.ArgSpec argSpec, CommandLine.Model.CommandSpec commandSpec) {
+ List<CommandWithArguments> currentCommands = argSpec.getValue();
+ List<String> commandArgs = new ArrayList<>();
+ while (!args.isEmpty()) {
+ String arg = args.pop();
+
+ if (";".equals(arg) || "+".equals(arg)) {
+ break;
+ }
+ if (arg.equals("-h") || arg.equals("--help")) {
+ // push the help flag back at the bottom of the stack;
+ // but first check whether anything else is left, and if not, stop after re-adding it
+ boolean done = args.isEmpty();
+ args.add(0, arg);
+ if (done) break;
+ } else
+ commandArgs.add(arg);
+ }
+ currentCommands.add(new CommandWithArguments(commandArgs));
+ }
+ }
+
+ @CommandLine.Option(names = { "-h", "--help" }, description = "display this help message", usageHelp = true)
+ boolean helpFlag;
+
+ @CommandLine.Option(names = { "--command" }, parameterConsumer = CommandArgsConverter.class,
+ description = "Command to execute with its arguments. Multiple commands can be chained using ';' or '+'.\n"
+ + "Example: --command create-dataset --input-path /data/images/ --input-pattern '*.tif' ; "
+ + "--command detect-interestpoints --detector SIFT --descriptor SIFT ; "
+ + "--command match-interestpoints --matcher FLANN ; stitching --stitchingModel Affine")
+ List<CommandWithArguments> commands = new ArrayList<>();
+
+ @Override
+ public Void call() throws Exception {
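+ // run the chained commands sequentially, forwarding the shared options to each one;
+ // the whole chain aborts on the first non-zero exit code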
+ for (CommandWithArguments commandArgs : commands) {
+ if (commandArgs.cmdArgs.isEmpty())
+ continue;
+
+ String cmdName = commandArgs.cmdArgs.get(0);
+ List<String> cmdArgs = new ArrayList<>(commandArgs.cmdArgs.subList(1, commandArgs.cmdArgs.size()));
+ addCommonOptions(cmdArgs);
+
+ AbstractInfrastructure cmdInstance = getCmdInstance(cmdName);
+ CommandLine currentCmdLine = new CommandLine(cmdInstance);
+ System.out.println("Execute command: " + cmdName + " with args: " + cmdArgs);
+ int exitCode = currentCmdLine.execute(cmdArgs.toArray(new String[0]));
+ if (exitCode != 0) {
+ System.err.println("Command " + cmdName + " failed with exit code " + exitCode);
+ System.exit(exitCode);
+ }
+ }
+ return null;
+ }
+
+ private AbstractInfrastructure getCmdInstance(String name) {
+ switch (name) {
+ case "clear-interestpoints": return new ClearInterestPoints();
+ case "clear-registrations": return new ClearRegistrations();
+ case "create-container": return new CreateFusionContainer();
+ case "detect-interestpoints": return new SparkInterestPointDetection();
+ case "match-interestpoints": return new SparkGeometricDescriptorMatching();
+ case "nonrigid-fusion": return new SparkNonRigidFusion();
+ case "create-dataset": return new CreateDataset();
+ case "stitching": return new SparkPairwiseStitching();
+ case "resave": return new SparkResaveN5();
+ case "downsample": return new SparkDownsample();
+ case "affine-fusion": return new SparkAffineFusion();
+ case "solver": return new Solver();
+ default: throw new IllegalArgumentException("Unknown command: " + name);
+ }
+ }
+
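+ /**
+ * Forward the shared infrastructure flags of this wrapper (--dryRun,
+ * --localSparkBindAddress, --s3Region) to the argument list of a chained command.
+ */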
+ private void addCommonOptions(List<String> cmdArgs) {
+ if (this.dryRun) {
+ cmdArgs.add("--dryRun");
+ }
+ if (this.localSparkBindAddress) {
+ cmdArgs.add("--localSparkBindAddress");
+ }
+ if (this.s3Region != null && !this.s3Region.isEmpty()) {
+ cmdArgs.add("--s3Region");
+ cmdArgs.add(this.s3Region);
+ }
+ }
+
+ public static void main(final String... args) throws SpimDataException {
+ System.out.println(Arrays.toString(args));
+
+ ChainCommands chainedCommands = new ChainCommands();
+ CommandLine commandLine = new CommandLine(chainedCommands)
+ .setUnmatchedOptionsArePositionalParams(true);
+
+ System.exit(commandLine.execute(args));
+ }
+
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
new file mode 100644
index 0000000..847912a
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
@@ -0,0 +1,5 @@
+package net.preibisch.bigstitcher.spark;
+
+public enum Compressions {
+ Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
new file mode 100644
index 0000000..9f1efef
--- /dev/null
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -0,0 +1,68 @@
+package net.preibisch.bigstitcher.spark;
+
+import java.io.Serializable;
+import java.net.URI;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.concurrent.Callable;
+
+import mpicbg.spim.data.SpimDataException;
+import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
+import net.preibisch.mvrecon.dataset.SpimDatasetBuilder;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
+import picocli.CommandLine;
+import picocli.CommandLine.Option;
+import util.URITools;
+
+public class CreateDataset extends AbstractBasic implements Callable<Void>, Serializable
+{
+ private static final long serialVersionUID = -5155338208494730656L;
+
+ @Option(names = {"--input-path"}, required = true, description = "Path to the input images, e.g. /data/images/")
+ private String inputPath = "/Users/goinac/Work/HHMI/stitching/datasets/tiny_4_bigstitcher/t1/";
+
+ @Option(names = {"--input-pattern"}, description = "Glob pattern for input images, e.g. /data/images/*.tif")
+ private String inputPattern = "*";
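+
+ // Example invocation (hypothetical paths; -x is the XML option inherited from AbstractBasic):
+ // create-dataset -x /data/dataset.xml --input-path /data/images/ --input-pattern '*.tif'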
+
+ @Override
+ public Void call() throws Exception {
+ this.setRegion();
+
+ SpimData2 spimData = createDataset();
+
+ URI xmlURI = URITools.toURI(xmlURIString);
+
+ System.out.println("Save spimData with original tiles to " + xmlURI);
+ prepareSaveLocation(xmlURI);
+ new XmlIoSpimData2().save(spimData, xmlURI);
+
+ return null;
+ }
+
+ private SpimData2 createDataset() {
+ SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern);
+ return spimDatasetBuilder.createDataset(URITools.toURI(inputPath));
+ }
+
+ private void prepareSaveLocation(URI xmlURI) {
+ if (URITools.isFile( xmlURI )) {
+ Path xmlPath = Paths.get(xmlURI);
+ // create parent directories if necessary
+ if ( !xmlPath.getParent().toFile().exists() ) {
+ if (!xmlPath.getParent().toFile().mkdirs()) {
+ // log the error but continue;
+ // if the directory wasn't created, writing the file below will fail anyway
+ System.err.println("Failed to create parent directory for " + xmlURI);
+ }
+ }
+ }
+ }
+
+ public static void main(final String... args) throws SpimDataException {
+ System.out.println(Arrays.toString(args));
+
+ System.exit(new CommandLine(new CreateDataset()).execute(args));
+ }
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
index f83178d..6a6a751 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/CreateFusionContainer.java
@@ -6,12 +6,21 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.Callable;
import java.util.function.Function;
+import mpicbg.spim.data.generic.base.Entity;
+import mpicbg.spim.data.registration.ViewRegistrations;
+import mpicbg.spim.data.sequence.SequenceDescription;
+import net.imglib2.util.Pair;
+import net.imglib2.util.ValuePair;
+import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.N5Writer;
@@ -59,8 +68,6 @@ public class CreateFusionContainer extends AbstractBasic implements Callable<Void>, Serializable
System.out.println( "Fusion target: " + boundingBox.getTitle() + ": " + Util.printInterval( boundingBox ) + " with blocksize " + Util.printCoordinates( blockSize ) );
// compression and data type
- final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel );
+ final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel );
- System.out.println( "Compression: " + this.compression );
+ System.out.println( "Compression: " + this.compressionType );
System.out.println( "Compression level: " + ( compressionLevel == null ? "default" : compressionLevel ) );
final DataType dt;
@@ -299,24 +324,27 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
return null;
}
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/InputXML", xmlURI );
+ // if there is a group different from the root, create it
+ if ( ! getContainerGroupPath().equals("/") ) driverVolumeWriter.createGroup( getContainerGroupPath() );
+
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", xmlURI );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumTimepoints", numTimepoints );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/NumChannels", numChannels );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", numTimepoints );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", numChannels );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", boundingBox.minAsLongArray() );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", boundingBox.maxAsLongArray() );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", preserveAnisotropy );
if (preserveAnisotropy) // cannot write Double.NaN into JSON
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/DataType", dt );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/BlockSize", blockSize );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", anisotropyFactor );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", dt );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", blockSize );
if ( minIntensity != null && maxIntensity != null )
{
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MinIntensity", minIntensity );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MaxIntensity", maxIntensity );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", minIntensity );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", maxIntensity );
}
// setup datasets and metadata
@@ -333,7 +361,7 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
System.out.println( "Creating 5D OME-ZARR metadata for '" + outPathURI + "' ... " );
if ( !bdv )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "OME-ZARR" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "OME-ZARR" );
final long[] dim3d = boundingBox.dimensionsAsLongArray();
@@ -343,14 +371,12 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
for ( int d = 0; d < ds.length; ++d )
ds[ d ] = new int[] { downsamplings[ d ][ 0 ], downsamplings[ d ][ 1 ], downsamplings[ d ][ 2 ], 1, 1 };
- final Function<Integer, String> levelToName = (level) -> "/" + level;
-
mrInfos = new MultiResolutionLevelInfo[ 1 ][];
// all is 5d now
mrInfos[ 0 ] = N5ApiTools.setupMultiResolutionPyramid(
driverVolumeWriter,
- levelToName,
+ (level) -> getContainerGroupPath() + level, // multiscale pyramid will be created for the entire provided group
dt,
dim, //5d
compression,
@@ -362,44 +388,37 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() );
+ updateAnisotropyAndCalibration(dataGlobal, viewIdsGlobal);
// extract the resolution of the s0 export
- // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied
- // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working
- final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize();
- final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN );
+ final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN );
- System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "micrometer" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() );
+ System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + calUnit );
// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
5, // int n
- "/", // String name, I also saw "/"
+ getContainerGroupPath(), // String name, I also saw "/"
resolutionS0, // double[] resolutionS0,
- "micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
+ calUnit, // String unitXYZ, e.g. micrometer (the unit might not be OME-ZARR compatible)
mrInfos[ 0 ].length, // int numResolutionLevels,
- levelToName,
+ (level) -> "/" + level, // OME-ZARR metadata will be created relative to the provided group
levelToMipmapTransform );
// save metadata
-
- //org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
- // for this to work you need to register an adapter in the N5Factory class
- // final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
- driverVolumeWriter.setAttribute( "/", "multiscales", meta );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "multiscales", meta );
}
if ( bdv )
{
System.out.println( "Creating BDV compatible container at '" + outPathURI + "' ... " );
-
if ( storageType == StorageFormat.N5 )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
else if ( storageType == StorageFormat.ZARR )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/OME-ZARR" );
else
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/HDF5" );
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/OutputXML", xmlOutURI );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/OutputXML", xmlOutURI );
final long[] bb = boundingBox.dimensionsAsLongArray();
@@ -410,14 +429,12 @@ else if ( storageType == StorageFormat.ZARR )
tps.add( new TimePoint( t ) );
// extract the resolution of the s0 export
- // TODO: this is inaccurate, we should actually estimate it from the final transformn that is applied
- // TODO: this is a hack (returns 1,1,1) so the export downsampling pyramid is working
- final VoxelDimensions vx = new FinalVoxelDimensions( "micrometer", new double[] { 1, 1, 1 } );// dataGlobal.getSequenceDescription().getViewSetupsOrdered().iterator().next().getVoxelSize();
- final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisotropyFactor, Double.NaN );
+
+ final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, avgAnisotropy, Double.NaN );
System.out.println( "Resolution of level 0: " + Util.printCoordinates( resolutionS0 ) + " " + "m" ); //vx.unit() might not be OME-ZARR compatiblevx.unit() );
- final VoxelDimensions vxNew = new FinalVoxelDimensions( "micrometer", resolutionS0 );
+ final VoxelDimensions vxNew = new FinalVoxelDimensions( calUnit, resolutionS0 );
for ( int c = 0; c < numChannels; ++c )
{
@@ -443,7 +460,7 @@ else if ( storageType == StorageFormat.ZARR )
for ( int t = 0; t < numTimepoints; ++t )
{
final OMEZARREntry omeZarrEntry = new OMEZARREntry(
- mrInfos[ 0 ][ 0 ].dataset.substring(0, mrInfos[ 0 ][ 0 ].dataset.lastIndexOf( "/" ) ),
+ mrInfos[ t ][ c ].dataset.substring(0, mrInfos[ t ][ c ].dataset.lastIndexOf( "/" ) ),
new int[] { c, t } );
viewIdToPath.put( new ViewId( t, c ), omeZarrEntry );
@@ -476,7 +493,7 @@ else if ( storageType == StorageFormat.ZARR )
myMrInfo[ c + t*c ] = N5ApiTools.setupBdvDatasetsN5(
driverVolumeWriter, vd, dt, bb, compression, blockSize, downsamplings);
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "BDV/N5" );
}
else // HDF5
{
@@ -492,9 +509,9 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 )
mrInfos = new MultiResolutionLevelInfo[ numChannels * numTimepoints ][];
if ( storageType == StorageFormat.N5 )
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "N5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "N5" );
else
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/FusionFormat", "HDF5" );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", "HDF5" );
for ( int c = 0; c < numChannels; ++c )
for ( int t = 0; t < numTimepoints; ++t )
@@ -516,23 +533,42 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.HDF5 )
}
// TODO: set extra attributes to load the state
- driverVolumeWriter.setAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", mrInfos );
+ driverVolumeWriter.setAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", mrInfos );
driverVolumeWriter.close();
return null;
}
- public static void main(final String... args) throws SpimDataException
+ private void updateAnisotropyAndCalibration( SpimData2 dataGlobal, List<ViewId> viewIdsGlobal )
{
+ ViewRegistrations registrations = dataGlobal.getViewRegistrations();
+ // get all view descriptions
+ List<ViewDescription> vds = SpimData2.getAllViewDescriptionsSorted(dataGlobal, viewIdsGlobal);
+ // group by timepoint and channel
+ Set<Class<? extends Entity>> groupingFactors = new HashSet<>(Arrays.asList(TimePoint.class, Channel.class));
+ List<Group<ViewDescription>> fusionGroups = Group.splitBy( vds, groupingFactors );
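+ // the average calibration of the first (timepoint, channel) group is used as
+ // representative for the whole container; 1x1x1 micrometer is the fallback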
+ Pair<double[], String> calAndUnit = fusionGroups.stream().findFirst()
+ .map(group -> TransformationTools.computeAverageCalibration(group, registrations))
+ .orElse(new ValuePair<>(new double[]{ 1, 1, 1 }, "micrometer"));
+ cal = calAndUnit.getA();
+ calUnit = calAndUnit.getB();
+
+ if (preserveAnisotropy) {
+ if (!Double.isNaN(this.anisotropyFactor)) {
+ avgAnisotropy = this.anisotropyFactor;
+ } else {
+ avgAnisotropy = TransformationTools.getAverageAnisotropyFactor(dataGlobal, viewIdsGlobal);
+ }
+ } else {
+ avgAnisotropy = Double.NaN;
+ }
+ }
- //final XmlIoSpimData io = new XmlIoSpimData();
- //final SpimData spimData = io.load( "/Users/preibischs/Documents/Microscopy/Stitching/Truman/standard/output/dataset.xml" );
- //BdvFunctions.show( spimData );
- //SimpleMultiThreading.threadHaltUnClean();
-
+ public static void main(final String... args) throws SpimDataException
+ {
System.out.println(Arrays.toString(args));
System.exit(new CommandLine(new CreateFusionContainer()).execute(args));
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
index b27594b..e9a0f23 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/Solver.java
@@ -289,7 +289,7 @@ public Void call() throws Exception
return null;
}
- final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false );
+ final GlobalOptimizationParameters globalOptParameters = new GlobalOptimizationParameters(relativeThreshold, absoluteThreshold, globalOptType, false, false );
final Collection< Pair< Group< ViewId >, Group< ViewId > > > removedInconsistentPairs = new ArrayList<>();
final HashMap< ViewId, Tile > models;
final Model<? extends Model<?>> model = createModelInstance(transformationModel, regularizationModel, regularizationLambda);
@@ -299,7 +299,8 @@ public Void call() throws Exception
final ConvergenceStrategy cs = new ConvergenceStrategy( maxError, maxIterations, maxPlateauwidth );
models = (HashMap)GlobalOpt.computeTiles(
- (Model)(Object)model,
+ (Model)model,
+ globalOptParameters.preAlign,
pmc,
cs,
fixedViewIds,
@@ -308,7 +309,8 @@ public Void call() throws Exception
else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE )
{
models = (HashMap)GlobalOptIterative.computeTiles(
- (Model)(Object)model,
+ (Model)model,
+ globalOptParameters.preAlign,
pmc,
new SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ),
new MaxErrorLinkRemoval(),
@@ -322,7 +324,8 @@ else if ( globalOptParameters.method == GlobalOptType.ONE_ROUND_ITERATIVE )
globalOptParameters.relativeThreshold = globalOptParameters.absoluteThreshold = Double.MAX_VALUE;
models = (HashMap)GlobalOptTwoRound.computeTiles(
- (Model & Affine3D)(Object)model,
+ (Model & Affine3D)model,
+ globalOptParameters.preAlign,
pmc,
new SimpleIterativeConvergenceStrategy( Double.MAX_VALUE, maxIterations, maxPlateauwidth, globalOptParameters.relativeThreshold, globalOptParameters.absoluteThreshold ), // if it's simple, both will be Double.MAX
new MaxErrorLinkRemoval(),
@@ -512,18 +515,18 @@ public static InterestPointMatchCreator setupPointMatchesFromInterestPoints(
pairResult.setLabelA( labelA );
pairResult.setLabelB( labelB );
- final List< CorrespondingInterestPoints > cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy();
+ final Collection< CorrespondingInterestPoints > cpA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getCorrespondingInterestPointsCopy();
//List< CorrespondingInterestPoints > cpB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( label ).getCorrespondingInterestPointsCopy();
- final List< InterestPoint > ipListA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy();
- final List< InterestPoint > ipListB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy();
+ final Map< Integer, InterestPoint > ipMapA = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vA ).getInterestPointList( labelA ).getInterestPointsCopy();
+ final Map< Integer, InterestPoint > ipMapB = dataGlobal.getViewInterestPoints().getViewInterestPointLists( vB ).getInterestPointList( labelB ).getInterestPointsCopy();
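+ // interest points are now looked up by their detection id (the map key) rather than
+ // by list position, so the lookup stays valid even if ids are not contiguous indices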
for ( final CorrespondingInterestPoints p : cpA )
{
if ( p.getCorrespodingLabel().equals( labelB ) && p.getCorrespondingViewId().equals( vB ) )
{
- InterestPoint ipA = ipListA.get( p.getDetectionId() );
- InterestPoint ipB = ipListB.get( p.getCorrespondingDetectionId() );
+ InterestPoint ipA = ipMapA.get( p.getDetectionId() );
+ InterestPoint ipB = ipMapB.get( p.getCorrespondingDetectionId() );
// we need to copy the array because it might not be bijective
// (some points in one list might correspond with the same point in the other list)
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
index dc62e11..7460f0d 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkAffineFusion.java
@@ -33,7 +33,6 @@
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.stream.Collectors;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
@@ -153,7 +152,6 @@ public enum DataTypeFusion
@Option(names = { "--prefetch" }, description = "prefetch all blocks required for fusion in each Spark job using unlimited threads, useful in cloud environments (default: false)")
protected boolean prefetch = false;
-
// TODO: add support for loading coefficients during fusion
@CommandLine.Option(names = { "--intensityN5Path" }, description = "N5/ZARR/HDF5 base path for loading coefficients (e.g. s3://myBucket/coefficients.n5)")
private String intensityN5PathURIString = null;
@@ -167,6 +165,9 @@ public enum DataTypeFusion
@CommandLine.Option(names = { "--intensityN5Dataset" }, description = "dataset name for each coefficient dataset (default: \"intensity\"). The coefficients for view(s,t) are stored in dataset \"{-n5Group}/setup{s}/timepoint{t}/{n5Dataset}\"")
private String intensityN5Dataset = "intensity";
+ @Option(names = { "--group" }, description = "Container group path")
+ private String groupPath = "";
+
URI outPathURI = null;
/**
* Prefetching now works with a Executors.newCachedThreadPool();
@@ -175,6 +176,18 @@ public enum DataTypeFusion
URI intensityN5PathURI = null;
+ /**
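+ * Examples: "" becomes "/", "fused/ch0" becomes "fused/ch0/".
+ *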
+ * @return container group path always terminated with a '/'
+ */
+ private String getContainerGroupPath()
+ {
+ if (!groupPath.endsWith("/")) {
+ return groupPath + "/";
+ } else {
+ return groupPath;
+ }
+ }
+
@Override
public Void call() throws Exception
{
@@ -238,7 +251,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
final N5Writer driverVolumeWriter = N5Util.createN5Writer( outPathURI, storageType );
- final String fusionFormat = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/FusionFormat", String.class );
+ final String fusionFormat = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/FusionFormat", String.class );
if ( fusionFormat == null )
{
@@ -248,14 +261,14 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
}
final boolean bdv = fusionFormat.toLowerCase().contains( "BDV" );
- final URI xmlURI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/InputXML", URI.class );
+ final URI xmlURI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/InputXML", URI.class );
final int numTimepoints, numChannels;
if ( timepointIndex == null )
{
- numTimepoints = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumTimepoints", int.class );
- numChannels = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/NumChannels", int.class );
+ numTimepoints = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumTimepoints", int.class );
+ numChannels = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/NumChannels", int.class );
}
else
{
@@ -263,16 +276,16 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
numTimepoints = numChannels = 1;
}
- final long[] bbMin = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_min", long[].class );
- final long[] bbMax = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/Boundingbox_max", long[].class );
-
+ final long[] bbMin = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_min", long[].class );
+ final long[] bbMax = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/Boundingbox_max", long[].class );
+
final BoundingBox boundingBox = new BoundingBox( new FinalInterval( bbMin, bbMax ) );
- final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/PreserveAnisotropy", boolean.class );
- final double anisotropyFactor = preserveAnisotropy ? driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN;
- final int[] blockSize = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/BlockSize", int[].class );
+ final boolean preserveAnisotropy = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/PreserveAnisotropy", boolean.class );
+ final double anisotropyFactor = preserveAnisotropy ? driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/AnisotropyFactor", double.class ) : Double.NaN;
+ final int[] blockSize = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/BlockSize", int[].class );
- final DataType dataType = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/DataType", DataType.class );
+ final DataType dataType = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/DataType", DataType.class );
System.out.println( "FusionFormat: " + fusionFormat );
System.out.println( "FusionType: " + fusionType );
@@ -289,8 +302,8 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
double minI = Double.NaN, maxI = Double.NaN;
try
{
- minI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MinIntensity", double.class );
- maxI = driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MaxIntensity", double.class );
+ minI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MinIntensity", double.class );
+ maxI = driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MaxIntensity", double.class );
}
catch ( Exception e )
{
@@ -304,7 +317,7 @@ else if ( outputPathURIString.toLowerCase().endsWith( ".h5" ) || outPathURI.toSt
System.out.println( "maxIntensity: " + maxI );
final MultiResolutionLevelInfo[][] mrInfos =
- driverVolumeWriter.getAttribute( "/", "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class );
+ driverVolumeWriter.getAttribute( getContainerGroupPath(), "Bigstitcher-Spark/MultiResolutionInfos", MultiResolutionLevelInfo[][].class );
System.out.println( "Loaded " + mrInfos.length + " metadata object for fused " + storageType + " volume(s)" );
@@ -462,8 +475,6 @@ else if ( intensityN5PathURIString.toLowerCase().endsWith( ".h5" ) || intensityN
System.out.println( "numJobs = " + grid.size() );
- //driverVolumeWriter.setAttribute( n5Dataset, "offset", minBB );
-
final RetryTrackerSpark retryTracker =
RetryTrackerSpark.forGridBlocks("s0 block processing", grid.size());
@@ -540,11 +551,7 @@ else if ( dataType == DataType.UINT16 )
return gridBlock;
// load intensity correction coefficients for all overlapping views
-
-
final Map< ViewId, Coefficients > coefficients;
-
-
if ( intensityN5PathURI != null )
{
coefficients = new HashMap<>();
@@ -598,7 +605,6 @@ else if ( dataType == DataType.UINT16 )
System.out.println( "Fusing block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) );
// returns a zero-min interval
- //blockSupplier = BlkAffineFusion.init(
blockSupplier = BlkAffineFusion.initWithIntensityCoefficients(
conv,
dataLocal.getSequenceDescription().getImgLoader(),
@@ -760,18 +766,18 @@ else if ( dataType == DataType.UINT16 )
rddDSResult.cache();
rddDSResult.count();
-
+
// extract all blocks that failed
final Set failedBlocksSet =
retryTrackerDS.processWithSpark( rddDSResult, grid );
-
+
// Use RetryTracker to handle retry counting and removal
if (!retryTrackerDS.processFailures(failedBlocksSet))
{
System.out.println( "Stopping." );
System.exit( 1 );
}
-
+
// Update grid for next iteration with remaining failed blocks
grid.clear();
grid.addAll(failedBlocksSet);
@@ -785,13 +791,6 @@ else if ( dataType == DataType.UINT16 )
// close main writer (is shared over Spark-threads if it's HDF5, thus just closing it here)
driverVolumeWriter.close();
- /*
- if ( multiRes )
- System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset.substring( 0, n5Dataset.length() - 3) + "'" );
- else
- System.out.println( "Saved, e.g. view with './n5-view -i " + n5PathURI + " -d " + n5Dataset + "'" );
- */
-
System.out.println( "done, took: " + (System.currentTimeMillis() - totalTime ) + " ms." );
sc.close();
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
index ad4103c..ea85983 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkGeometricDescriptorMatching.java
@@ -24,6 +24,7 @@
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -283,7 +284,7 @@ else if ( ransacIterations == null )
} );
// load & transform all interest points
- final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints =
+ final Map< ViewId, HashMap< String, Collection< InterestPoint > > > interestpoints =
TransformationTools.getAllTransformedInterestPoints(
views,
data.getViewRegistrations().getViewRegistrations(),
@@ -299,8 +300,8 @@ else if ( ransacIterations == null )
interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() );
System.out.println( Group.pvid( task.vA ) + " (" + task.labelA + ") <=> " + Group.pvid( task.vB ) + " (" + task.labelB + "): Remaining interest points for alignment: " );
- for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() )
- for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() )
+ for ( final Entry< ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() )
+ for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() )
System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() );
}
@@ -390,7 +391,7 @@ else if ( ransacIterations == null )
} );
// load & transform all interest points
- final Map< ViewId, HashMap< String, List< InterestPoint > > > interestpoints =
+ final Map< ViewId, HashMap< String, Collection< InterestPoint > > > interestpoints =
TransformationTools.getAllTransformedInterestPoints(
views,
data.getViewRegistrations().getViewRegistrations(),
@@ -410,12 +411,12 @@ else if ( ransacIterations == null )
TransformationTools.filterForOverlappingInterestPoints( interestpoints, groups, data.getViewRegistrations().getViewRegistrations(), data.getSequenceDescription().getViewDescriptions() );
System.out.println( task.vA + " (" + task.labelA + ") <=> " + task.vB + " (" + task.labelB + "): Remaining interest points for alignment: " );
- for ( final Entry< ViewId, HashMap< String, List< InterestPoint > > > element: interestpoints.entrySet() )
- for ( final Entry< String, List< InterestPoint > > subElement : element.getValue().entrySet() )
+ for ( final Entry< ViewId, HashMap< String, Collection< InterestPoint > > > element: interestpoints.entrySet() )
+ for ( final Entry< String, Collection< InterestPoint > > subElement : element.getValue().entrySet() )
System.out.println( Group.pvid( element.getKey() ) + ", '" + subElement.getKey() + "' : " + subElement.getValue().size() );
}
- final Map< Group< ViewId >, HashMap< String, List< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>();
+ final Map< Group< ViewId >, HashMap< String, Collection< GroupedInterestPoint< ViewId > > > > groupedInterestpoints = new HashMap<>();
final InterestPointGroupingMinDistance< ViewId > ipGrouping
= new InterestPointGroupingMinDistance<>( interestPointMergeDistance, interestpoints );
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
index 0a0b816..68dcbed 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkPairwiseStitching.java
@@ -30,6 +30,8 @@
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
+import mpicbg.spim.data.registration.ViewTransform;
+import mpicbg.spim.data.registration.ViewTransformAffine;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
@@ -331,13 +333,19 @@ public Void call() throws Exception
System.out.println( new Date( System.currentTimeMillis() ) + ": Remaining pairs: " + results.size() );
-
// update StitchingResults with Results
for ( final PairwiseStitchingResult< ViewId > psr : results )
{
if (psr == null)
continue;
+ // update the view registrations with the pairwise stitching results
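+ // each view of the first group gets the inverse pairwise transform preconcatenated
+ // as an extra "Stitching Transform" on top of its existing registration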
+ psr.pair().getA().getViews().forEach( viewId -> {
+ dataGlobal.getViewRegistrations().getViewRegistration(viewId)
+ .preconcatenateTransform(new ViewTransformAffine(
+ "Stitching Transform",
+ new AffineTransform3D().concatenate(psr.getInverseTransform())));
+ });
dataGlobal.getStitchingResults().setPairwiseResultForPair(psr.pair(), psr );
}
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
index 45c6423..32f710a 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SparkResaveN5.java
@@ -32,20 +32,11 @@
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.bigdataviewer.n5.N5CloudImageLoader;
-import org.janelia.saalfeldlab.n5.Compression;
-import org.janelia.saalfeldlab.n5.DataType;
-import org.janelia.saalfeldlab.n5.N5Writer;
-import org.janelia.saalfeldlab.n5.universe.StorageFormat;
-
import bdv.img.n5.N5ImageLoader;
import mpicbg.spim.data.sequence.ViewId;
+import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.util.Util;
import net.imglib2.util.ValuePair;
-import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions;
import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
import net.preibisch.bigstitcher.spark.util.Import;
import net.preibisch.bigstitcher.spark.util.N5Util;
@@ -58,6 +49,14 @@
import net.preibisch.mvrecon.fiji.spimdata.imgloaders.AllenOMEZarrLoader.OMEZARREntry;
import net.preibisch.mvrecon.process.n5api.N5ApiTools;
import net.preibisch.mvrecon.process.n5api.N5ApiTools.MultiResolutionLevelInfo;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.bigdataviewer.n5.N5CloudImageLoader;
+import org.janelia.saalfeldlab.n5.Compression;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import util.URITools;
@@ -96,7 +95,7 @@ public class SparkResaveN5 extends AbstractBasic implements Callable<Void>, Serializable
@Option(names = {"-c", "--compression"}, defaultValue = "Zstandard", showDefaultValue = CommandLine.Help.Visibility.ALWAYS,
description = "Dataset compression")
- private Compressions compression = null;
+ private Compressions compressionType = null;
@Option(names = {"-cl", "--compressionLevel" }, description = "compression level, if supported by the codec (default: gzip 1, Zstandard 3, xz 6)")
private Integer compressionLevel = null;
@@ -109,33 +108,8 @@ public Void call() throws Exception
{
this.setRegion();
- /*
- Exception in thread "main" java.lang.IllegalAccessError: tried to access method com.google.common.collect.ImmutableList$Builder.(I)V from class com.google.common.collect.Streams
- at com.google.common.collect.Streams.concat(Streams.java:204)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.TransformationUtils.tranformsToAffine(TransformationUtils.java:27)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.buildMetadata(OmeNgffMultiScaleMetadata.java:159)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:101)
- at org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.(OmeNgffMultiScaleMetadata.java:91)
- at net.preibisch.mvrecon.fiji.spimdata.imgloaders.OMEZarrAttibutes.createOMEZarrMetadata(OMEZarrAttibutes.java:128)
- at net.preibisch.mvrecon.process.n5api.N5ApiTools.setupBdvDatasetsOMEZARR(N5ApiTools.java:422)
- at net.preibisch.bigstitcher.spark.SparkResaveN5.lambda$call$1(SparkResaveN5.java:219)
- */
-
- /*
- local:
- com.google.common.collect.ImmutableList: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar
- com.google.common.collect.Streams: file:/home/preibischs@hhmi.org/.m2/repository/com/google/guava/guava/33.3.1-jre/guava-33.3.1-jre.jar
- */
-
- /*
- cluster:
- com.google.common.collect.ImmutableList: file:/misc/local/spark-3.4.1/jars/guava-14.0.1.jar
- com.google.common.collect.Streams: file:/groups/scicompsoft/home/preibischs/Keller/BigStitcher-Spark-0.1.0-SNAPSHOT.jar
- */
-
System.out.println( "com.google.common.collect.ImmutableList: " + com.google.common.collect.ImmutableList.class.getProtectionDomain().getCodeSource().getLocation() );
System.out.println( "com.google.common.collect.Streams: " + com.google.common.collect.Streams.class.getProtectionDomain().getCodeSource().getLocation() );
- //System.exit( 0 );
final SpimData2 dataGlobal = this.loadSpimData2();
@@ -164,7 +138,7 @@ public Void call() throws Exception
}
final URI n5PathURI = URITools.toURI( this.n5PathURIString == null ? URITools.appendName( URITools.getParentURI( xmlOutURI ), (useN5 ? "dataset.n5" : "dataset.ome.zarr") ) : n5PathURIString );
- final Compression compression = N5Util.getCompression( this.compression, this.compressionLevel );
+ final Compression compression = N5Util.getCompression( this.compressionType, this.compressionLevel );
final int[] blockSize = Import.csvStringToIntArray(blockSizeString);
final int[] blockScale = Import.csvStringToIntArray(blockScaleString);
@@ -174,10 +148,9 @@ public Void call() throws Exception
blockSize[1] * blockScale[ 1 ],
blockSize[2] * blockScale[ 2 ] };
- //final N5Writer n5 = new N5FSWriter(n5Path);
final N5Writer n5Writer = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI );
- System.out.println( "Compression: " + this.compression );
+ System.out.println( "Compression: " + this.compressionType );
System.out.println( "Compression level: " + ( compressionLevel == null ? "default" : compressionLevel ) );
System.out.println( "N5 block size=" + Util.printCoordinates( blockSize ) );
System.out.println( "Compute block size=" + Util.printCoordinates( computeBlockSize ) );
@@ -242,13 +215,14 @@ public Void call() throws Exception
else
{
System.out.println( Arrays.toString( blockSize ) );
-
+ final VoxelDimensions vx = dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize();
mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR(
n5Writer,
viewId,
dataTypes.get( viewId.getViewSetupId() ),
dimensions.get( viewId.getViewSetupId() ),
- //dataGlobal.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(), // TODO: this is a hack for now
+ vx.dimensionsAsDoubleArray(),
+ vx.unit(),
compression,
blockSize,
downsamplings);
@@ -267,8 +241,6 @@ public Void call() throws Exception
if ( localSparkBindAddress )
conf.set("spark.driver.bindAddress", "127.0.0.1");
- //System.exit( 0 );
-
final JavaSparkContext sc = new JavaSparkContext(conf);
sc.setLogLevel("ERROR");
@@ -361,8 +333,7 @@ public Void call() throws Exception
}
final JavaRDD rddsN = sc.parallelize(allBlocks, Math.min( Spark.maxPartitions, allBlocks.size() ) );
-
-
+
final JavaRDD rdds0Result = rddsN.map( gridBlock ->
{
final N5Writer n5Lcl = URITools.instantiateN5Writer( useN5 ? StorageFormat.N5 : StorageFormat.ZARR, n5PathURI );
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
index 8489335..f2fff98 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/SplitDatasets.java
@@ -41,7 +41,7 @@ public class SplitDatasets extends AbstractBasic
private boolean disableOptimization = false;
@Option(names = { "-fip", "--fakeInterestPoints" }, description = "add fake interest points to overlapping regions of split images/views")
- private boolean fakeInterestPoints = false;
+ private Split_Views.InterestPointAdding fakeInterestPoints = Split_Views.InterestPointAdding.NONE;
@Option(names = { "--fipDensity" }, description = "density of fake interest points; number of points per 100x100x100 px volume (default: 100.0)")
private double fipDensity = 100.0;
diff --git a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
index 7d5b52c..a93a81f 100644
--- a/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
+++ b/src/main/java/net/preibisch/bigstitcher/spark/util/N5Util.java
@@ -36,7 +36,7 @@
import org.janelia.saalfeldlab.n5.universe.StorageFormat;
import org.janelia.scicomp.n5.zstandard.ZstandardCompression;
-import net.preibisch.bigstitcher.spark.CreateFusionContainer.Compressions;
+import net.preibisch.bigstitcher.spark.Compressions;
import net.preibisch.legacy.io.IOFunctions;
import util.URITools;
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
new file mode 100644
index 0000000..2b07631
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelLoader.java
@@ -0,0 +1,62 @@
+/*-
+ * #%L
+ * Software for the reconstruction of multi-view microscopic acquisitions
+ * like Selective Plane Illumination Microscopy (SPIM) Data.
+ * %%
+ * Copyright (C) 2012 - 2025 Multiview Reconstruction developers.
+ * %%
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation, either version 2 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public
+ * License along with this program. If not, see
+ * .
+ * #L%
+ */
+package net.preibisch.mvrecon.dataset;
+
+import java.net.URI;
+import java.util.Map;
+
+import bdv.img.n5.N5ImageLoader;
+import bdv.img.n5.N5Properties;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+import util.URITools;
+
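+/**
+ * N5 image loader that resolves each view's dataset path through an explicit
+ * ViewId-to-path map instead of a fixed setup/timepoint container layout.
+ */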
+public class N5MultichannelLoader extends N5ImageLoader
+{
+ private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription;
+
+ private final Map< ViewId, String > viewIdToPath;
+
+ public N5MultichannelLoader(
+ final URI n5URI,
+ final StorageFormat storageFormat,
+ final AbstractSequenceDescription< ?, ?, ? > sequenceDescription,
+ final Map< ViewId, String > viewIdToPath )
+ {
+ super( URITools.instantiateN5Reader( storageFormat, n5URI ), n5URI, sequenceDescription );
+ this.sequenceDescription = sequenceDescription;
+
+ this.viewIdToPath = viewIdToPath;
+ }
+
+ @Override
+ protected N5Properties createN5PropertiesInstance()
+ {
+ return new N5MultichannelProperties( sequenceDescription, viewIdToPath );
+ }
+
+ public Map< ViewId, String > getViewIdToPath() {
+ return viewIdToPath;
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java
new file mode 100644
index 0000000..2556741
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/N5MultichannelProperties.java
@@ -0,0 +1,153 @@
+/*-
+ * #%L
+ * Software for the reconstruction of multi-view microscopic acquisitions
+ * like Selective Plane Illumination Microscopy (SPIM) Data.
+ * %%
+ * Copyright (C) 2012 - 2025 Multiview Reconstruction developers.
+ * %%
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as
+ * published by the Free Software Foundation, either version 2 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public
+ * License along with this program. If not, see
+ * .
+ * #L%
+ */
+package net.preibisch.mvrecon.dataset;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import bdv.img.n5.N5Properties;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.sequence.TimePoint;
+import mpicbg.spim.data.sequence.ViewId;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Reader;
+
+public class N5MultichannelProperties implements N5Properties
+{
+ private final AbstractSequenceDescription< ?, ?, ? > sequenceDescription;
+
+ private final Map< ViewId, String > viewIdToPath;
+ private int numMipmapLevels;
+
+ public N5MultichannelProperties(
+ final AbstractSequenceDescription< ?, ?, ? > sequenceDescription,
+ final Map< ViewId, String > viewIdToPath )
+ {
+ this.sequenceDescription = sequenceDescription;
+ this.viewIdToPath = viewIdToPath;
+ this.numMipmapLevels = -1;
+ }
+
+ private String getPath( final int setupId, final int timepointId )
+ {
+ return viewIdToPath.get( new ViewId( timepointId, setupId ) );
+ }
+
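+ // e.g. with a (hypothetical) mapping of ViewId(tp=0, setup=1) to "ch1/tile0",
+ // getDatasetPath(1, 0, 2) resolves to "ch1/tile0/s2"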
+ @Override
+ public String getDatasetPath( final int setupId, final int timepointId, final int level )
+ {
+ return String.format( getPath( setupId, timepointId ) + "/s%d", level );
+ }
+
+ @Override
+ public DataType getDataType( final N5Reader n5, final int setupId )
+ {
+ return N5MultichannelProperties.getDataType( this, n5, setupId );
+ }
+
+ @Override
+ public double[][] getMipmapResolutions( final N5Reader n5, final int setupId )
+ {
+ return getMipMapResolutions( this, n5, setupId );
+ }
+
+ @Override
+ public long[] getDimensions( final N5Reader n5, final int setupId, final int timepointId, final int level )
+ {
+ final String path = getDatasetPath( setupId, timepointId, level );
+ final long[] dimensions = n5.getDatasetAttributes( path ).getDimensions();
+ return Arrays.copyOf( dimensions, 3 );
+ }
+
+ public <T> T getRootAttribute( N5Reader n5, String attributeKey, Class<T> attributeType )
+ {
+ return n5.getAttribute("", attributeKey, attributeType);
+ }
+
+ public <T> T getAttribute( N5Reader n5, int setupId, int timepointId, int level, String attributeKey, Class<T> attributeType )
+ {
+ String path;
+ if (level >= 0) {
+ path = getDatasetPath( setupId, timepointId, level );
+ } else {
+ path = getPath( setupId, timepointId );
+ }
+ return n5.getAttribute(path, attributeKey, attributeType);
+ }
+
+ private int getNumMipmapLevels( final N5Reader n5, final int setupId, final int timepointId )
+ {
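+ // the level count is cached after the first lookup, which assumes every view
+ // in the container exposes the same number of mipmap levels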
+ if ( numMipmapLevels >= 0 )
+ return numMipmapLevels;
+
+ final String path = getPath( setupId, timepointId );
+ String[] subgroups = n5.list(path);
+ numMipmapLevels = subgroups != null ? subgroups.length : 0;
+ return numMipmapLevels;
+ }
+
+ //
+ // static methods
+ //
+
+ private static int getFirstAvailableTimepointId( final AbstractSequenceDescription< ?, ?, ? > seq, final int setupId )
+ {
+ for ( final TimePoint tp : seq.getTimePoints().getTimePointsOrdered() )
+ {
+ if ( seq.getMissingViews() == null || seq.getMissingViews().getMissingViews() == null || !seq.getMissingViews().getMissingViews().contains( new ViewId( tp.getId(), setupId ) ) )
+ return tp.getId();
+ }
+
+ throw new RuntimeException( "All timepoints for setupId " + setupId + " are declared missing. Stopping." );
+ }
+
+ private static DataType getDataType(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId )
+ {
+ final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );
+ return n5.getDatasetAttributes( n5properties.getDatasetPath( setupId, timePointId, 0 ) ).getDataType();
+ }
+
+ private static double[][] getMipMapResolutions(final N5MultichannelProperties n5properties, final N5Reader n5, final int setupId )
+ {
+ final int timePointId = getFirstAvailableTimepointId( n5properties.sequenceDescription, setupId );
+
+ // read scales and pixelResolution attributes from the base container and build the mipmap resolutions from that
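+ // e.g. (hypothetical values) a level with pixelResolution {0.1, 0.1, 0.2} and
+ // downsamplingFactors {2, 2, 1} contributes the scale entry {0.2, 0.2, 0.2}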
+ List<double[]> scales = new ArrayList<>();
+ int numLevels = n5properties.getNumMipmapLevels(n5, setupId, timePointId);
+ for (int level = 0; level < numLevels; level++ ) {
+ double[] pixelResolution = n5properties.getAttribute(n5, setupId, timePointId, level, "pixelResolution", double[].class);
+ double[] downSamplingFactors = n5properties.getAttribute(n5, setupId, timePointId, level, "downsamplingFactors", double[].class);
+ if (pixelResolution != null) {
+ if (downSamplingFactors != null) {
+ for (int d = 0; d < pixelResolution.length && d < downSamplingFactors.length; d++) {
+ pixelResolution[d] *= downSamplingFactors[d];
+ }
+ }
+ scales.add(pixelResolution);
+ }
+ }
+ return !scales.isEmpty() ? scales.toArray( new double[0][]) : new double[][] { { 1, 1, 1 } };
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
new file mode 100644
index 0000000..68f3068
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/SpimDatasetBuilder.java
@@ -0,0 +1,516 @@
+package net.preibisch.mvrecon.dataset;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.PathMatcher;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import loci.formats.ChannelSeparator;
+import loci.formats.IFormatReader;
+import loci.formats.meta.MetadataRetrieve;
+import mpicbg.spim.data.generic.sequence.BasicViewDescription;
+import mpicbg.spim.data.registration.ViewRegistrations;
+import mpicbg.spim.data.sequence.Angle;
+import mpicbg.spim.data.sequence.Channel;
+import mpicbg.spim.data.sequence.FinalVoxelDimensions;
+import mpicbg.spim.data.sequence.Illumination;
+import mpicbg.spim.data.sequence.SequenceDescription;
+import mpicbg.spim.data.sequence.Tile;
+import mpicbg.spim.data.sequence.TimePoint;
+import mpicbg.spim.data.sequence.TimePoints;
+import mpicbg.spim.data.sequence.ViewDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import mpicbg.spim.data.sequence.ViewSetup;
+import mpicbg.spim.data.sequence.VoxelDimensions;
+import net.imglib2.Dimensions;
+import net.imglib2.FinalDimensions;
+import net.preibisch.mvrecon.fiji.datasetmanager.DatasetCreationUtils;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.FileMapImgLoaderLOCI;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.LegacyStackImgLoaderLOCI;
+import net.preibisch.mvrecon.fiji.spimdata.imgloaders.filemap2.FileMapEntry;
+import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints;
+import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions;
+import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults;
+import ome.units.UNITS;
+import ome.units.quantity.Length;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.janelia.saalfeldlab.n5.N5FSReader;
+import org.janelia.saalfeldlab.n5.N5Reader;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+
+public class SpimDatasetBuilder {
+
+ static class ViewIndex {
+ final int tp, ch, il, ang;
+
+ ViewIndex(int tp, int ch, int il, int ang) {
+ this.tp = tp;
+ this.ch = ch;
+ this.il = il;
+ this.ang = ang;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || getClass() != o.getClass()) return false;
+ ViewIndex viewIndex = (ViewIndex) o;
+ return tp == viewIndex.tp && ch == viewIndex.ch && il == viewIndex.il && ang == viewIndex.ang;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(tp, ch, il, ang);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("tp", tp)
+ .append("ch", ch)
+ .append("il", il)
+ .append("ang", ang)
+ .toString();
+ }
+ }
+
+ static class StackFile {
+ final ViewIndex viewIndex;
+ final int ti;
+ final URI baseURI;
+ final String relativeFilePath;
+ int nImages = -1;
+ int nTp = -1;
+ int nCh = -1;
+ int sizeZ = -1;
+ int sizeY = -1;
+ int sizeX = -1;
+
+ StackFile(int tp, int ch, int il, int ang, int ti, URI baseURI, String relativeFilePath )
+ {
+ this.viewIndex = new ViewIndex(tp, ch, il, ang);
+ this.ti = ti;
+ this.baseURI = baseURI;
+ this.relativeFilePath = relativeFilePath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || getClass() != o.getClass()) return false;
+ StackFile stackFile = (StackFile) o;
+ return ti == stackFile.ti &&
+ Objects.equals(viewIndex, stackFile.viewIndex) &&
+ Objects.equals(baseURI, stackFile.baseURI) &&
+ Objects.equals(relativeFilePath, stackFile.relativeFilePath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(viewIndex, ti, baseURI, relativeFilePath);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this)
+ .append("view", viewIndex)
+ .append("ti", ti)
+ .append("baseURI", baseURI)
+ .append("relativeFilePath", relativeFilePath)
+ .toString();
+ }
+
+ Path getFilePath() {
+ return Paths.get(baseURI).resolve(relativeFilePath);
+ }
+
+ int getTp() {
+ return viewIndex.tp;
+ }
+
+ int getTi() {
+ return ti;
+ }
+
+ public int getCh() {
+ return viewIndex.ch;
+ }
+
+ public int getAng() {
+ return viewIndex.ang;
+ }
+
+ public int getIl() {
+ return viewIndex.il;
+ }
+ }
+
+ interface ViewSetupBuilder {
+ SequenceDescription getSequenceDescription();
+ ViewSetupBuilder setImgLoader();
+ ViewSetupBuilder createViewSetups(List<StackFile> stackFiles);
+ }
+
+
+ static class LOCIViewSetupBuilder implements ViewSetupBuilder {
+
+ private final SequenceDescription sequenceDescription;
+ private final Map<Integer, StackFile> viewToStackFileMap = new HashMap<>();
+
+ LOCIViewSetupBuilder(TimePoints timePoints) {
+ this.sequenceDescription = new SequenceDescription(
+ timePoints,
+ /*view setups*/Collections.emptyList()
+ );
+ }
+
+ @Override
+ public SequenceDescription getSequenceDescription() {
+ return sequenceDescription;
+ }
+
+ @Override
+ public LOCIViewSetupBuilder setImgLoader() {
+ Map<BasicViewDescription<?>, FileMapEntry> fileMap = new HashMap<>();
+ for (ViewSetup vs : sequenceDescription.getViewSetupsOrdered()) {
+ StackFile stackFile = viewToStackFileMap.get(vs.getId());
+ ViewDescription vdI = sequenceDescription.getViewDescription( stackFile.getTp(), vs.getId() );
+ FileMapEntry fileMapEntry = new FileMapEntry(
+ stackFile.getFilePath().toFile(),
+ vs.getTile().getId() - (stackFile.getTi() * stackFile.nImages), // recreate the image index within the file
+ vs.getChannel().getId());
+ fileMap.put( vdI, fileMapEntry);
+ }
+
+ sequenceDescription.setImgLoader(new FileMapImgLoaderLOCI(
+ fileMap,
+ sequenceDescription,
+ false
+ ));
+ return this;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public LOCIViewSetupBuilder createViewSetups(List<StackFile> stackFiles) {
+ int nfiles = stackFiles.size();
+ for (int sfi = 0; sfi < nfiles; sfi++) {
+ StackFile stackFile = stackFiles.get(sfi);
+ File tileFile = stackFile.getFilePath().toFile();
+ if ( !tileFile.exists() )
+ {
+ continue;
+ }
+ IFormatReader formatReader = new ChannelSeparator();
+ try {
+ if ( !LegacyStackImgLoaderLOCI.createOMEXMLMetadata( formatReader ) ) {
+ try {
+ formatReader.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ continue;
+ }
+
+ formatReader.setId( tileFile.toString() );
+
+ MetadataRetrieve retrieve = (MetadataRetrieve)formatReader.getMetadataStore();
+
+ stackFile.nImages = retrieve.getImageCount();
+ stackFile.nTp = formatReader.getSizeT();
+ stackFile.nCh = formatReader.getSizeC();
+ stackFile.sizeZ = formatReader.getSizeZ();
+ stackFile.sizeY = formatReader.getSizeY();
+ stackFile.sizeX = formatReader.getSizeX();
+ for (int imageIndex = 0; imageIndex < stackFile.nImages; imageIndex++) {
+ Length offsetX = retrieve.getPlanePositionX(imageIndex, 0);
+ Length offsetY = retrieve.getPlanePositionY(imageIndex, 0);
+ Length offsetZ = retrieve.getPlanePositionZ(imageIndex, 0);
+ Length resX = retrieve.getPixelsPhysicalSizeX(imageIndex);
+ Length resY = retrieve.getPixelsPhysicalSizeY(imageIndex);
+ Length resZ = retrieve.getPixelsPhysicalSizeZ(imageIndex);
+
+ double oX = offsetX != null ? offsetX.value(UNITS.MICROMETER).doubleValue() : 0;
+ double oY = offsetY != null ? offsetY.value(UNITS.MICROMETER).doubleValue() : 0;
+ double oZ = offsetZ != null ? offsetZ.value(UNITS.MICROMETER).doubleValue() : 0;
+ double rX = resX != null ? resX.value(UNITS.MICROMETER).doubleValue() : 0;
+ double rY = resY != null ? resY.value(UNITS.MICROMETER).doubleValue() : 0;
+ double rZ = resZ != null ? resZ.value(UNITS.MICROMETER).doubleValue() : 0;
+ VoxelDimensions voxelDimensions = new FinalVoxelDimensions("um", rX, rY, rZ);
+ System.out.println("Voxel dimensions: " + voxelDimensions);
+ int imageChannels = retrieve.getChannelCount(imageIndex);
+ for (int chIndex = 0; chIndex < imageChannels; chIndex++) {
+ String chName = retrieve.getChannelName(imageIndex, chIndex);
+ // currently viewIndex is derived only from the number of images and channels;
+ // a complete implementation would also consider timepoints, illuminations and
+ // angles, but so far these have not been needed
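+ // worked example (hypothetical numbers): with nfiles = 2 and nImages = 3,
+ // channel 1 of image 2 in file 0 gets viewIndex = 1*2*3 + 0*3 + 2 = 8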
+ int viewIndex = chIndex * nfiles * stackFile.nImages + sfi * stackFile.nImages + imageIndex;
+ Tile tile = new Tile(stackFile.getTi() * stackFile.nImages + imageIndex);
+ tile.setLocation(new double[]{oX, oY, oZ});
+ Channel channel = new Channel(chIndex, chName);
+ ViewSetup vs = new ViewSetup(
+ viewIndex,
+ String.valueOf(viewIndex),
+ new FinalDimensions(stackFile.sizeX, stackFile.sizeY, stackFile.sizeZ),
+ voxelDimensions,
+ tile,
+ channel,
+ new Angle(stackFile.getAng()),
+ new Illumination(stackFile.getIl())
+ );
+ viewToStackFileMap.put(viewIndex, stackFile);
+ ((Map<Integer, ViewSetup>) sequenceDescription.getViewSetups()).put(viewIndex, vs);
+ }
+ }
+ } catch (Exception e) {
+ throw new IllegalStateException("Could not read " + stackFile, e);
+ }
+ }
+ return this;
+ }
+ }
+
+ static class N5MultichannelViewSetupBuilder implements ViewSetupBuilder {
+
+ private final URI n5ContainerURI;
+ private final SequenceDescription sequenceDescription;
+ private final Map<ViewId, String> viewIdToPath;
+ private final N5Reader n5Reader;
+ private final N5MultichannelProperties n5MultichannelProperties;
+
+ public N5MultichannelViewSetupBuilder(URI n5ContainerURI, TimePoints timePoints) {
+ this.n5ContainerURI = n5ContainerURI;
+ this.sequenceDescription = new SequenceDescription(
+ timePoints,
+ /*view setups*/Collections.emptyList()
+ );
+ this.viewIdToPath = new HashMap<>();
+ n5Reader = new N5FSReader(n5ContainerURI.toString());
+ n5MultichannelProperties = new N5MultichannelProperties(sequenceDescription, viewIdToPath);
+ }
+
+ @Override
+ public SequenceDescription getSequenceDescription() {
+ return sequenceDescription;
+ }
+
+ @Override
+ public N5MultichannelViewSetupBuilder setImgLoader() {
+ sequenceDescription.setImgLoader(
+ new N5MultichannelLoader( n5ContainerURI, StorageFormat.N5, sequenceDescription, viewIdToPath )
+ );
+ return this;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public N5MultichannelViewSetupBuilder createViewSetups(List<StackFile> stackFiles) {
+ for (int i = 0; i < stackFiles.size(); i++) {
+ StackFile stackFile = stackFiles.get(i);
+ if ( Files.notExists(stackFile.getFilePath()) )
+ {
+ continue;
+ }
+ viewIdToPath.put(new ViewId(stackFile.getTp(), i), stackFile.relativeFilePath);
+
+ Map<String, Object> pixelResolutions = n5MultichannelProperties.getRootAttribute(n5Reader, "pixelResolution", Map.class);
+ VoxelDimensions voxelDimensions;
+ if (pixelResolutions != null) {
+ double[] res = ((List<Double>) pixelResolutions.getOrDefault("dimensions", Arrays.asList(1., 1., 1.)))
+ .stream()
+ .mapToDouble(d -> d)
+ .toArray();
+ String resUnits = (String) pixelResolutions.getOrDefault("unit", "voxel");
+ voxelDimensions = new FinalVoxelDimensions(resUnits, res);
+ } else {
+ voxelDimensions = new FinalVoxelDimensions("voxel", 1., 1., 1.);
+ }
+ System.out.println("Voxel dimensions: " + voxelDimensions);
+ long[] dims = n5MultichannelProperties.getDimensions(n5Reader, i, stackFile.getTp(), 0);
+ Dimensions size = new FinalDimensions(dims[0], dims[1], dims[2]);
+ ViewSetup vs = new ViewSetup(
+ i, // in this case view index coincides with stack file index
+ stackFile.relativeFilePath,
+ size,
+ voxelDimensions,
+ new Tile(stackFile.getTi()),
+ new Channel(stackFile.getCh()),
+ new Angle(stackFile.getAng()),
+ new Illumination(stackFile.getIl())
+ );
+ ((Map<Integer, ViewSetup>) sequenceDescription.getViewSetups()).put(i, vs);
+ }
+
+ return this;
+ }
+
+ }
+
+ static class StackPattern {
+ final String sourcePattern;
+ final String globPattern;
+ final Pattern regexPattern;
+ final Set<String> keys;
+
+ StackPattern( String sourcePattern ) {
+ this.sourcePattern = sourcePattern;
+ this.globPattern = sourcePattern
+ .replaceAll("\\{t\\}", "*")
+ .replaceAll("\\{c\\}", "*")
+ .replaceAll("\\{i\\}", "*")
+ .replaceAll("\\{a\\}", "*")
+ .replaceAll("\\{x\\}", "*");
+ this.regexPattern = Pattern.compile( sourcePattern
+ .replaceAll("\\.", "\\\\.") // escape dot
+ .replaceAll("\\*", ".*")
+ .replaceAll("\\{t\\}", "(?\\\\D*?\\\\d+)")
+ .replaceAll("\\{c\\}", "(?\\\\D*?\\\\d+)")
+ .replaceAll("\\{i\\}", "(?\\\\D*?\\\\d+)")
+ .replaceAll("\\{a\\}", "(?\\\\D*?\\\\d+)")
+ .replaceAll("\\{x\\}", "(?\\\\D*?\\\\d+)") );
+
+ this.keys = initializeKeys(sourcePattern);
+ }
+
+ private Set<String> initializeKeys(String sourcePattern) {
+ Set<String> patternKeys = new HashSet<>();
+ String regexStr = Pattern.quote(sourcePattern);
+ Matcher m = Pattern.compile("\\{(t|c|i|a|x)\\}").matcher(regexStr);
+ while (m.find()) {
+ String key = m.group(1);
+ patternKeys.add(key);
+ }
+ return patternKeys;
+ }
+
+ String getGlobPattern() {
+ return "glob:" + globPattern;
+ }
+
+ int getSearchDepth() {
+ return (int) sourcePattern.chars().filter(c -> c == File.separatorChar).count();
+ }
+
+ boolean hasKey(String key) {
+ return keys.contains(key);
+ }
+ }
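+
+ // Example (hypothetical pattern): new StackPattern("s_{x}_t_{t}.tif") produces the
+ // glob "s_*_t_*.tif" and the regex "s_(?<ti>\D*?\d+)_t_(?<tp>\D*?\d+)\.tif",
+ // so "s_02_t_5.tif" resolves to tile index 2 at timepoint 5.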
+
+ private final StackPattern fileNamePattern;
+
+ public SpimDatasetBuilder(String fileNamePattern )
+ {
+ this.fileNamePattern = new StackPattern(fileNamePattern);
+ }
+
+ public SpimData2 createDataset(URI imagePath) {
+ List<StackFile> stackFiles = getStackFiles(imagePath);
+
+ // collect timepoints from stack files
+ Set<TimePoint> timePoints = stackFiles.stream()
+ .map(si -> new TimePoint(si.viewIndex.tp))
+ .collect(Collectors.toSet());
+
+ SequenceDescription sequenceDescription = createViewSetupBuilder(imagePath, new TimePoints(timePoints))
+ .createViewSetups(stackFiles)
+ .setImgLoader()
+ .getSequenceDescription();
+
+ // get the min resolution from all calibrations
+ double minResolution = DatasetCreationUtils.minResolution(
+ sequenceDescription,
+ sequenceDescription.getViewDescriptions().values()
+ );
+
+ ViewRegistrations viewRegistrations = DatasetCreationUtils.createViewRegistrations(
+ sequenceDescription.getViewDescriptions(),
+ minResolution
+ );
+
+ ViewInterestPoints viewInterestPoints = new ViewInterestPoints();
+
+ return new SpimData2(
+ imagePath,
+ sequenceDescription,
+ viewRegistrations,
+ viewInterestPoints,
+ new BoundingBoxes(),
+ new PointSpreadFunctions(),
+ new StitchingResults(),
+ new IntensityAdjustments()
+ );
+ }
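+
+ // Minimal usage sketch (pattern and path are hypothetical); a plain file URI whose
+ // path does not contain ".n5" selects the Bio-Formats (LOCI) builder below:
+ //
+ //   SpimDatasetBuilder builder = new SpimDatasetBuilder( "tile_{x}_ch_{c}.tif" );
+ //   SpimData2 dataset = builder.createDataset( Paths.get( "/data/images" ).toUri() );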
+
+ /**
+ * Collects the stack files that match the configured file-name pattern.
+ * So far only local paths are supported.
+ *
+ * @param imageURI base URI of the directory to search
+ * @return the stack files whose relative paths match the pattern
+ */
+ private List<StackFile> getStackFiles(URI imageURI)
+ {
+ int searchDepth = fileNamePattern.getSearchDepth();
+ Path imagePath = Paths.get(imageURI);
+ // match candidate files against the glob derived from the file name pattern
+ PathMatcher matcher = FileSystems.getDefault().getPathMatcher(fileNamePattern.getGlobPattern());
+ // Files.walk returns a lazily populated stream that must be closed
+ try (Stream<Path> paths = Files.walk(imagePath, searchDepth + 1)) {
+ List<StackFile> fs = paths
+ .filter(path -> matcher.matches(imagePath.relativize(path)))
+ .map(p -> getStackFile(imageURI, imagePath.relativize(p).toString()))
+ .collect(Collectors.toList());
+ System.out.println(fs);
+ return fs;
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ private StackFile getStackFile(URI imageURI, String imageRelativePath)
+ {
+ Matcher m = fileNamePattern.regexPattern.matcher(imageRelativePath);
+ if ( m.matches() ) {
+ int tp = extractInt(fileNamePattern.hasKey("t") ? m.group("tp") : "0");
+ int ch = extractInt(fileNamePattern.hasKey("c") ? m.group("ch") : "0");
+ int il = extractInt(fileNamePattern.hasKey("i") ? m.group("il") : "0");
+ int ang = extractInt(fileNamePattern.hasKey("a") ? m.group("ang") : "0");
+ int ti = extractInt(fileNamePattern.hasKey("x") ? m.group("ti") : "0");
+ return new StackFile(tp, ch, il, ang, ti, imageURI, imageRelativePath);
+ } else {
+ throw new IllegalArgumentException(imageRelativePath + " does not match " + fileNamePattern.sourcePattern + ". Refine the pattern and try again");
+ }
+ }
+
+ int extractInt(String input) {
+ Matcher m = Pattern.compile("\\D*(\\d+)").matcher(input);
+ if (m.matches()) {
+ return Integer.parseInt(m.group(1));
+ } else {
+ return 0;
+ }
+ }
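+
+ // e.g. extractInt("ch05") returns 5; extractInt("abc") contains no digits and
+ // falls back to 0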
+
+ private ViewSetupBuilder createViewSetupBuilder(URI imageURI, TimePoints timePoints) {
+ if ( imageURI.getScheme().equals("n5") || imageURI.getScheme().equals("file") && imageURI.getPath().contains(".n5") ) {
+ return new N5MultichannelViewSetupBuilder(imageURI, timePoints);
+ } else {
+ return new LOCIViewSetupBuilder(timePoints);
+ }
+ }
+}
diff --git a/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java
new file mode 100644
index 0000000..1c07745
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/dataset/XmlToN5MultichannelLoader.java
@@ -0,0 +1,63 @@
+package net.preibisch.mvrecon.dataset;
+
+import java.io.File;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+import mpicbg.spim.data.XmlHelpers;
+import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
+import mpicbg.spim.data.generic.sequence.ImgLoaderIo;
+import mpicbg.spim.data.generic.sequence.XmlIoBasicImgLoader;
+import mpicbg.spim.data.sequence.ViewId;
+import org.janelia.saalfeldlab.n5.universe.StorageFormat;
+import org.jdom2.Element;
+
+import static mpicbg.spim.data.XmlKeys.IMGLOADER_FORMAT_ATTRIBUTE_NAME;
+
+@ImgLoaderIo( format = "bdv.multchimg.n5", type = N5MultichannelLoader.class )
+public class XmlToN5MultichannelLoader implements XmlIoBasicImgLoader<N5MultichannelLoader> {
+ @Override
+ public Element toXml(N5MultichannelLoader imgLoader, File basePath) {
+ final Element imgLoaderElement = new Element( "ImageLoader" );
+ imgLoaderElement.setAttribute( IMGLOADER_FORMAT_ATTRIBUTE_NAME, "bdv.multchimg.n5" );
+ imgLoaderElement.setAttribute( "version", "1.0" );
+
+ imgLoaderElement.addContent( XmlHelpers.pathElementURI( "n5", imgLoader.getN5URI(), basePath.toURI() ));
+
+ final Element zgroupsElement = new Element( "n5groups" );
+
+ for ( final Map.Entry<ViewId, String> entry : imgLoader.getViewIdToPath().entrySet() )
+ {
+ final Element n5groupElement = new Element("n5group");
+ n5groupElement.setAttribute( "setup", String.valueOf( entry.getKey().getViewSetupId() ) );
+ n5groupElement.setAttribute( "tp", String.valueOf( entry.getKey().getTimePointId() ) );
+ n5groupElement.setAttribute( "path", String.valueOf( entry.getValue() ) );
+
+ n5groupsElement.addContent( n5groupElement );
+ }
+
+ imgLoaderElement.addContent( n5groupsElement );
+
+ return imgLoaderElement;
+ }
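+
+ // For illustration (values hypothetical), the element built above looks roughly like:
+ //
+ //   <ImageLoader format="bdv.multchimg.n5" version="1.0">
+ //     <n5 type="relative">dataset.n5</n5>
+ //     <n5groups>
+ //       <n5group setup="0" tp="0" path="setup0/timepoint0" />
+ //     </n5groups>
+ //   </ImageLoader>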
+
+ @Override
+ public N5MultichannelLoader fromXml(Element elem, File basePath, AbstractSequenceDescription<?, ?, ?> sequenceDescription) {
+ final Map<ViewId, String> n5groups = new HashMap<>();
+
+ URI uri = XmlHelpers.loadPathURI( elem, "n5", basePath.toURI() );
+
+ final Element n5groupsElem = elem.getChild( "n5groups" );
+ for ( final Element c : n5groupsElem.getChildren( "n5group" ) )
+ {
+ final int timepointId = Integer.parseInt( c.getAttributeValue( "tp" ) );
+ final int setupId = Integer.parseInt( c.getAttributeValue( "setup" ) );
+ final String path = c.getAttributeValue( "path" );
+ n5groups.put( new ViewId( timepointId, setupId ), path );
+ }
+
+ return new N5MultichannelLoader(uri, StorageFormat.N5, sequenceDescription, n5groups);
+ }
+}