Skip to content

Commit eac1e53

Browse files
Fixed issue of missing interest points between neighboring blocks.
1 parent 4110678 commit eac1e53

File tree

1 file changed

+28
-6
lines changed

1 file changed

+28
-6
lines changed

src/main/java/net/preibisch/bigstitcher/spark/SparkInterestPointDetection.java

Lines changed: 28 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -97,6 +97,7 @@
9797
import picocli.CommandLine.Option;
9898
import scala.Tuple2;
9999
import scala.Tuple3;
100+
import scala.Tuple4;
100101
import util.Grid;
101102
import util.URITools;
102103

@@ -234,6 +235,7 @@ public Void call() throws Exception
234235
// assemble all intervals that need to be processed
235236
//
236237
final ArrayList< Pair< ViewId, Interval > > toProcess = new ArrayList<>();
238+
final HashMap< ViewId, long[] > downsampledDimensions = new HashMap<>();
237239

238240
// assemble all pairs for parallelization with Spark
239241
final ArrayList< Tuple2< ViewId, ViewId > > metadataJobs = new ArrayList<>();
@@ -278,7 +280,7 @@ public Void call() throws Exception
278280

279281
final JavaRDD<Tuple2<ViewId, ViewId>> metadataJobsSpark = sc.parallelize( metadataJobs, Math.min( Spark.maxPartitions, metadataJobs.size() ) );
280282

281-
final JavaRDD< ArrayList< Tuple3< ViewId, long[], long[] > > > metadataJobRDD = metadataJobsSpark.map( metaData ->
283+
final JavaRDD< ArrayList< Tuple4< ViewId, long[], long[], long[] > > > metadataJobRDD = metadataJobsSpark.map( metaData ->
282284
{
283285
final SpimData2 dataLocal = Spark.getSparkJobSpimData2( xmlURI );
284286

@@ -301,7 +303,7 @@ public Void call() throws Exception
301303
ds,
302304
true );
303305

304-
final ArrayList< Tuple3< ViewId, long[], long[] > > resultIntervals = new ArrayList<>();
306+
final ArrayList< Tuple4< ViewId, long[], long[], long[] > > resultIntervals = new ArrayList<>();
305307

306308
if ( overlappingOnly )
307309
{
@@ -342,21 +344,23 @@ public Void call() throws Exception
342344
//System.out.println( "intersection=" + Util.printInterval( intersection ) + ", size (#px)=" + size );
343345
//maxIntervalSize = Math.max( maxIntervalSize, size );
344346

345-
resultIntervals.add( new Tuple3<>( metaData._1(), intersection.minAsLongArray(), intersection.maxAsLongArray() ) );
347+
resultIntervals.add( new Tuple4<>( metaData._1(), intersection.minAsLongArray(), intersection.maxAsLongArray(), input.getA().dimensionsAsLongArray() ) );
346348
}
347349

348350
}
349351
else
350352
{
351-
resultIntervals.add( new Tuple3<>( metaData._1(), input.getA().minAsLongArray(), input.getA().maxAsLongArray() ));
353+
resultIntervals.add( new Tuple4<>( metaData._1(), input.getA().minAsLongArray(), input.getA().maxAsLongArray(), input.getA().dimensionsAsLongArray() ));
352354
}
353355

354356
return resultIntervals;
355357
});
356358

357359
metadataJobRDD.collect().forEach(
358-
l -> l.forEach(
359-
md -> toProcess.add(new ValuePair<ViewId, Interval>(md._1(), new FinalInterval(md._2(), md._3())))));
360+
l -> l.forEach( md -> {
361+
toProcess.add(new ValuePair<ViewId, Interval>(md._1(), new FinalInterval(md._2(), md._3())));
362+
downsampledDimensions.put(md._1(), md._4());
363+
}));
360364

361365
long maxIntervalSize = 0;
362366

@@ -395,12 +399,30 @@ public Void call() throws Exception
395399
final long[] superBlockMax = new long[ intervalOffset.length ];
396400
Arrays.setAll( superBlockMax, d -> superBlockMin[ d ] + gridEntry[ 1 ][ d ] - 1 );
397401

402+
// expand each interval boundary that lies within the image by one; otherwise there are gaps between neighboring blocks,
403+
// because each block performs "true convolutions" for the 3x3x3 min/max finding
404+
final long[] dim = downsampledDimensions.get( pair.getA() );
405+
for ( int d = 0; d < superBlockMin.length; ++d )
406+
{
407+
if ( superBlockMin[ d ] > 0 )
408+
--superBlockMin[ d ];
409+
410+
if ( superBlockMax[ d ] < dim[ d ] - 1 )
411+
++superBlockMax[ d ];
412+
}
413+
398414
System.out.println( "Processing " + Group.pvid(pair.getA()) + ", " + Util.printInterval( new FinalInterval(superBlockMin, superBlockMax) ) + " of full interval " + Util.printInterval( pair.getB() ) );
399415
});
400416
}
401417

402418
System.out.println( "Total number of jobs for interest point detection: " + sparkProcess.size() );
403419

420+
if ( sparkProcess.size() == 0 )
421+
{
422+
System.out.println( "Nothing to do, stopping." );
423+
System.exit( 0 );
424+
}
425+
404426
// create temporary N5 folder
405427
final String tempLocation = URITools.appendName( dataGlobal.getBasePathURI(), InterestPointsN5.baseN5 );
406428
final URI tempURI = URITools.toURI( tempLocation );

0 commit comments

Comments (0)