4 Copyright (C) 2008 Verena Kaynig.
6 This program is free software; you can redistribute it and/or
7 modify it under the terms of the GNU General Public License
8 as published by the Free Software Foundation (http://www.gnu.org/licenses/gpl.txt )
10 This program is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with this program; if not, write to the Free Software
17 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
21 /* **************************************************************************** *
22 * This ImageJ Plugin estimates a non linear lens distortion in *
23 * the images given and corrects all images with the same transformation *
27 * - show histogram of beta coefficients to evaluate kernel dimension needed *
28 * - show SIFT matches before and after correction? *
29 * - give numerical xcorr results in a better manner *
30 * - show visual impression of the stitching ? *
32 * - do mask images properly to incorporate into TrakEM *
34 * Author: Verena Kaynig *
 * Contact: verena.kaynig@inf.ethz.ch                                           *
37 * SIFT implementation: Stephan Saalfeld *
38 * **************************************************************************** */
40 package lenscorrection
;
43 import ij
.gui
.GenericDialog
;
44 import ij
.plugin
.PlugIn
;
45 import ij
.process
.ColorProcessor
;
46 import ij
.process
.ImageProcessor
;
48 import ij
.io
.DirectoryChooser
;
50 import ij
.io
.FileSaver
;
52 import mpi
.fruitfly
.general
.MultiThreading
;
53 import mpicbg
.models
.*;
54 import mpicbg
.ij
.SIFT
;
55 import mpicbg
.imagefeatures
.*;
57 import java
.util
.ArrayList
;
58 import java
.util
.Arrays
;
59 import java
.util
.Collection
;
60 import java
.util
.Collections
;
61 import java
.util
.List
;
62 import java
.util
.concurrent
.atomic
.AtomicInteger
;
63 import java
.awt
.Color
;
64 import java
.awt
.geom
.AffineTransform
;
67 import org
.jfree
.chart
.*;
68 import org
.jfree
.chart
.plot
.PlotOrientation
;
69 import org
.jfree
.data
.category
.DefaultCategoryDataset
;
74 public class Distortion_Correction
implements PlugIn
{
76 static public class PointMatchCollectionAndAffine
78 final public AffineTransform affine
;
79 final public Collection
< PointMatch
> pointMatches
;
80 public PointMatchCollectionAndAffine( final AffineTransform affine
, final Collection
< PointMatch
> pointMatches
)
83 this.pointMatches
= pointMatches
;
87 static public class BasicParam
89 final public FloatArray2DSIFT
.Param sift
= new FloatArray2DSIFT
.Param();
92 * Closest/next closest neighbour distance ratio
94 public float rod
= 0.92f
;
97 * Maximal allowed alignment error in px
99 public float maxEpsilon
= 32.0f
;
102 * Inlier/candidates ratio
104 public float minInlierRatio
= 0.2f
;
107 * Implemeted transformation models for choice
109 final static public String
[] modelStrings
= new String
[]{ "Translation", "Rigid", "Similarity", "Affine" };
110 public int expectedModelIndex
= 1;
113 * Order of the polynomial kernel
115 public int dimension
= 5;
118 * Regularization factor
120 public double lambda
= 0.01;
125 sift
.maxOctaveSize
= 1600;
126 sift
.minOctaveSize
= 400;
129 public void addFields( final GenericDialog gd
)
131 SIFT
.addFields( gd
, sift
);
133 gd
.addNumericField( "closest/next_closest_ratio :", rod
, 2 );
135 gd
.addMessage( "Geometric Consensus Filter :" );
136 gd
.addNumericField( "maximal_alignment_error :", maxEpsilon
, 2, 6, "px" );
137 gd
.addNumericField( "inlier_ratio :", minInlierRatio
, 2 );
138 gd
.addChoice( "expected_transformation :", modelStrings
, modelStrings
[ expectedModelIndex
] );
140 gd
.addMessage( "Lens Model :" );
141 gd
.addNumericField( "power_of_polynomial_kernel :", dimension
, 0 );
142 gd
.addNumericField( "lambda :", lambda
, 6 );
145 public boolean readFields( final GenericDialog gd
)
147 SIFT
.readFields( gd
, sift
);
149 rod
= ( float )gd
.getNextNumber();
151 maxEpsilon
= ( float )gd
.getNextNumber();
152 minInlierRatio
= ( float )gd
.getNextNumber();
153 expectedModelIndex
= gd
.getNextChoiceIndex();
155 dimension
= ( int )gd
.getNextNumber();
156 lambda
= ( double )gd
.getNextNumber();
158 return !gd
.invalidNumber();
161 public boolean setup( final String title
)
163 final GenericDialog gd
= new GenericDialog( title
);
168 if ( gd
.wasCanceled() ) return false;
170 while ( !readFields( gd
) );
176 public BasicParam
clone()
178 final BasicParam p
= new BasicParam();
180 p
.dimension
= dimension
;
181 p
.expectedModelIndex
= expectedModelIndex
;
183 p
.maxEpsilon
= maxEpsilon
;
184 p
.minInlierRatio
= minInlierRatio
;
191 static public class PluginParam
extends BasicParam
193 public String source_dir
= "";
194 public String target_dir
= "";
195 public String saveFileName
= "distCorr.txt";
197 public boolean applyCorrection
= true;
198 public boolean visualizeResults
= true;
200 public int numberOfImages
= 9;
201 public int firstImageIndex
= 0;
204 * Original and calibrated images are supposed to have the same names,
205 * but are in different directories
207 public String
[] names
;
209 public int saveOrLoad
= 0;
212 public void addFields( final GenericDialog gd
)
214 super.addFields( gd
);
218 public boolean readFields( final GenericDialog gd
)
220 super.readFields( gd
);
222 return !gd
.invalidNumber();
226 * Setup as a three step dialog.
229 public boolean setup( final String title
)
232 while ( source_dir
== "" )
234 final DirectoryChooser dc
= new DirectoryChooser( "Calibration Images" );
235 source_dir
= dc
.getDirectory();
236 if ( null == source_dir
) return false;
238 source_dir
= source_dir
.replace( '\\', '/' );
239 if ( !source_dir
.endsWith( "/" ) ) source_dir
+= "/";
242 final String exts
= ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
243 names
= new File( source_dir
).list(
246 public boolean accept( File dir
, String name
)
248 int idot
= name
.lastIndexOf( '.' );
249 if ( -1 == idot
) return false;
250 return exts
.contains( name
.substring( idot
).toLowerCase() );
253 Arrays
.sort( names
);
255 final GenericDialog gd
= new GenericDialog( title
);
257 gd
.addNumericField( "number_of_images :", 9, 0 );
258 gd
.addChoice( "first_image :", names
, names
[ 0 ] );
259 gd
.addNumericField( "power_of_polynomial_kernel :", dimension
, 0 );
260 gd
.addNumericField( "lambda :", lambda
, 6 );
261 gd
.addCheckbox( "apply_correction_to_images", applyCorrection
);
262 gd
.addCheckbox( "visualize results", visualizeResults
);
263 final String
[] options
= new String
[]{ "save", "load" };
264 gd
.addChoice( "What to do? ", options
, options
[ saveOrLoad
] );
265 gd
.addStringField( "file_name: ", saveFileName
);
268 if (gd
.wasCanceled()) return false;
270 numberOfImages
= ( int )gd
.getNextNumber();
271 firstImageIndex
= gd
.getNextChoiceIndex();
272 dimension
= ( int )gd
.getNextNumber();
273 lambda
= gd
.getNextNumber();
274 applyCorrection
= gd
.getNextBoolean();
275 visualizeResults
= gd
.getNextBoolean();
276 saveOrLoad
= gd
.getNextChoiceIndex();
277 saveFileName
= gd
.getNextString();
279 if ( saveOrLoad
== 0 || visualizeResults
)
281 final GenericDialog gds
= new GenericDialog( title
);
282 SIFT
.addFields( gds
, sift
);
284 gds
.addNumericField( "closest/next_closest_ratio :", rod
, 2 );
286 gds
.addMessage( "Geometric Consensus Filter:" );
287 gds
.addNumericField( "maximal_alignment_error :", maxEpsilon
, 2, 6, "px" );
288 gds
.addNumericField( "inlier_ratio :", minInlierRatio
, 2 );
289 gds
.addChoice( "expected_transformation :", modelStrings
, modelStrings
[ expectedModelIndex
] );
292 if ( gds
.wasCanceled() ) return false;
294 SIFT
.readFields( gds
, sift
);
296 rod
= ( float )gds
.getNextNumber();
298 maxEpsilon
= ( float )gds
.getNextNumber();
299 minInlierRatio
= ( float )gds
.getNextNumber();
300 expectedModelIndex
= gds
.getNextChoiceIndex();
302 return !( gd
.invalidNumber() || gds
.invalidNumber() );
305 return !gd
.invalidNumber();
309 static final public BasicParam p
= new BasicParam();
310 static final public PluginParam sp
= new PluginParam();
312 NonLinearTransform nlt
= new NonLinearTransform();
313 AbstractAffineModel2D
< ?
>[] models
;
315 public void run(String arg
)
317 if ( !sp
.setup( "Lens Correction" ) ) return;
319 List
< List
< PointMatch
> > inliers
= null;
320 if (sp
.saveOrLoad
== 0) {
322 //IJ.log( sp.source_dir + sp.names[ 0 ] );
323 final ImagePlus imgTmp
= new Opener().openImage( sp
.source_dir
+ sp
.names
[ 0 ] );
324 final int imageWidth
= imgTmp
.getWidth(), imageHeight
=imgTmp
.getHeight();
325 /** imgTmp was just needed to get width and height of the images */
328 List
< Feature
>[] siftFeatures
= extractSIFTFeaturesThreaded( sp
.numberOfImages
, sp
.source_dir
, sp
.names
);
330 List
< PointMatch
>[] inliersTmp
= new ArrayList
[ sp
.numberOfImages
* ( sp
.numberOfImages
- 1 ) ];
331 models
= new AbstractAffineModel2D
[ sp
.numberOfImages
* ( sp
.numberOfImages
- 1 ) ];
333 IJ
.showStatus( "Estimating Correspondences" );
334 for( int i
= 0; i
< sp
.numberOfImages
; ++i
)
336 IJ
.log( "Estimating correspondences of image " + i
);
337 IJ
.showProgress( ( i
+ 1 ), sp
.numberOfImages
);
338 extractSIFTPointsThreaded( i
, siftFeatures
, inliersTmp
, models
);
342 inliers
= new ArrayList
< List
< PointMatch
> >();
343 for ( int i
= 0; i
< inliersTmp
.length
; i
++ )
345 // if vector at inliersTmp[i] contains only one null element,
346 // its size is still 1
347 if (inliersTmp
[i
].size() > 10){
348 wholeCount
+= inliersTmp
[i
].size();
350 //if I do not do this then the models have not the
351 //right index corresponding to the inliers positions in the vector
352 inliers
.add(inliersTmp
[i
]);
355 //this is really really ugly
356 double[][] tp
= new double[wholeCount
][6];
357 double h1
[][] = new double[wholeCount
][2];
358 double h2
[][] = new double[wholeCount
][2];
360 for ( int i
= 0; i
< inliers
.size(); ++i
)
362 if ( inliers
.get(i
).size() > 10 )
364 double[][] points1
= new double[inliers
.get(i
).size()][2];
365 double[][] points2
= new double[inliers
.get(i
).size()][2];
367 for (int j
=0; j
< inliers
.get(i
).size(); j
++){
369 float[] tmp1
= ((PointMatch
) inliers
.get(i
).get(j
)).getP1().getL();
370 float[] tmp2
= ((PointMatch
) inliers
.get(i
).get(j
)).getP2().getL();
372 points1
[j
][0] = (double) tmp1
[0];
373 points1
[j
][1] = (double) tmp1
[1];
374 points2
[j
][0] = (double) tmp2
[0];
375 points2
[j
][1] = (double) tmp2
[1];
377 h1
[count
] = new double[] {(double) tmp1
[0], (double) tmp1
[1]};
378 h2
[count
] = new double[] {(double) tmp2
[0], (double) tmp2
[1]};
380 models
[i
].createAffine().getMatrix(tp
[count
]);
386 //if ( sp.saveOrLoad == 0 )
388 nlt
= distortionCorrection( h1
, h2
, tp
, sp
.dimension
, sp
.lambda
, imageWidth
, imageHeight
);
389 nlt
.visualizeSmall( sp
.lambda
);
394 final GenericDialog gdl
= new GenericDialog( "New lambda?" );
395 gdl
.addMessage( "If the distortion field shows a clear translation, \n it is likely that you need to increase lambda." );
396 gdl
.addNumericField( "lambda :", sp
.lambda
, 6 );
398 if ( gdl
.wasCanceled() ) break;
399 sp
.lambda
= gdl
.getNextNumber();
400 nlt
= distortionCorrection( h1
, h2
, tp
, sp
.dimension
, sp
.lambda
, imageWidth
, imageHeight
);
401 nlt
.visualizeSmall( sp
.lambda
);
403 nlt
.save( sp
.source_dir
+ sp
.saveFileName
);
405 //after all preprocessing is done, estimate the distortion correction transform
408 if ( sp
.saveOrLoad
== 1 )
410 nlt
.load( sp
.source_dir
+ sp
.saveFileName
);
414 if ( sp
.applyCorrection
|| sp
.visualizeResults
)
416 sp
.target_dir
= correctImages();
419 if ( sp
.visualizeResults
&& (sp
.saveOrLoad
== 0))
421 IJ
.log( "call nlt.visualize()" );
423 if(null != sp
.target_dir
)
425 IJ
.log( "Evaluating distortion correction..." );
426 evaluateCorrection( inliers
);
432 public void evaluateCorrection( List
< List
< PointMatch
> > inliers
)
434 IJ
.showStatus("Evaluating Distortion Correction");
435 double[][] original
= new double[ sp
.numberOfImages
][ 2 ];
436 double[][] corrected
= new double[ sp
.numberOfImages
][ 2 ];
439 for ( int i
= sp
.firstImageIndex
; i
< sp
.numberOfImages
; i
++ )
441 original
[ i
] = evaluateCorrectionXcorr( i
, sp
.source_dir
);
442 corrected
[ i
] = evaluateCorrectionXcorr( i
, sp
.target_dir
);
444 DefaultCategoryDataset dataset
= new DefaultCategoryDataset();
445 DefaultCategoryDataset datasetGain
= new DefaultCategoryDataset();
446 DefaultCategoryDataset datasetGrad
= new DefaultCategoryDataset();
448 for ( int i
= 0; i
< ( original
.length
); ++i
)
450 dataset
.setValue(Math
.abs(original
[i
][0]), "before", "image" + i
);
451 dataset
.setValue(Math
.abs(corrected
[i
][0]), "after", "image" + i
);
452 datasetGrad
.setValue(Math
.abs(original
[i
][1]), "before", "image" + i
);
453 datasetGrad
.setValue(Math
.abs(corrected
[i
][1]), "after", "image" + i
);
455 datasetGain
.setValue(Math
.abs(corrected
[i
][0]) - Math
.abs(original
[i
][0]), "gray","image" + i
);
456 datasetGain
.setValue(Math
.abs(corrected
[i
][1]) - Math
.abs(original
[i
][1]), "grad","image" + i
);
459 final JFreeChart chart
= ChartFactory
.createBarChart(
460 "Xcorr before and after correction",
464 PlotOrientation
.VERTICAL
,
467 final ImagePlus imp
= new ImagePlus( "Xcorr before and after correction Plot", chart
.createBufferedImage( 500, 300 ) );
470 final JFreeChart chartGrad
= ChartFactory
.createBarChart(
471 "XcorrGradient before and after correction",
475 PlotOrientation
.VERTICAL
,
478 final ImagePlus impGrad
= new ImagePlus( "XcorrGradient before and after correction Plot", chartGrad
.createBufferedImage( 500, 300 ) );
481 final JFreeChart chartGain
= ChartFactory
.createBarChart(
486 PlotOrientation
.VERTICAL
,
489 final ImagePlus impGain
= new ImagePlus( "Gain in Xcorr Plot", chartGain
.createBufferedImage( 500, 300 ) );
492 visualizePoints( inliers
);
494 //write xcorr data to file
495 String original0
= "", original1
= "", corrected0
= "", corrected1
= "", gain0
= "", gain1
= "";
496 for ( int i
= 0; i
< ( original
.length
); ++i
)
498 original0
= original0
+ Double
.toString(original
[i
][0]) + "; ";
499 original1
= original1
+ Double
.toString(original
[i
][1]) + "; ";
500 corrected0
= corrected0
+ Double
.toString(corrected
[i
][0]) + "; ";
501 corrected1
= corrected1
+ Double
.toString(corrected
[i
][1]) + "; ";
502 gain0
= gain0
+ Double
.toString(Math
.abs(corrected
[i
][0]) - Math
.abs(original
[i
][0])) + "; ";
503 gain1
= gain1
+ Double
.toString(Math
.abs(corrected
[i
][1]) - Math
.abs(original
[i
][1])) + "; ";
508 BufferedWriter out
= new BufferedWriter(new FileWriter( sp
.source_dir
+ "xcorrData.log" ) );
510 out
.write( original0
);
513 out
.write(original1
);
516 out
.write(corrected0
);
519 out
.write(corrected1
);
529 catch (Exception e
){System
.err
.println("Error: " + e
.getMessage());};
533 protected void extractSIFTPoints(
535 List
< Feature
>[] siftFeatures
,
536 List
< List
< PointMatch
> > inliers
,
537 List
< AbstractAffineModel2D
< ?
> > models
)
540 //save all matching candidates
541 List
< List
< PointMatch
> > candidates
= new ArrayList
< List
< PointMatch
> >();
543 for ( int j
= 0; j
< siftFeatures
.length
; j
++ )
545 if ( index
== j
) continue;
546 candidates
.add( FloatArray2DSIFT
.createMatches( siftFeatures
[ index
], siftFeatures
[ j
], 1.5f
, null, Float
.MAX_VALUE
, 0.5f
) );
549 //get rid of the outliers and save the transformations to match the inliers
550 for ( int i
= 0; i
< candidates
.size(); ++i
)
552 List
< PointMatch
> tmpInliers
= new ArrayList
< PointMatch
>();
554 final AbstractAffineModel2D
< ?
> m
;
555 switch ( sp
.expectedModelIndex
)
558 m
= new TranslationModel2D();
561 m
= new RigidModel2D();
564 m
= new SimilarityModel2D();
567 m
= new AffineModel2D();
582 catch( NotEnoughDataPointsException e
) { e
.printStackTrace(); }
584 inliers
.add( tmpInliers
);
590 * Estimates a polynomial distortion model for a set of
591 * {@linkplain PointMatch corresponding points} and returns the inverse of
592 * this model which then may be used to undo the distortion.
596 * a list of affines in the same order as matches that transfer a match
597 * collection into a common image frame
598 * @param dimension the order of the polynomial model
599 * @param lambda regularization factor
603 * @return a polynomial distortion model to undo the distortion
605 static public NonLinearTransform
createInverseDistortionModel(
606 final Collection
< PointMatchCollectionAndAffine
> pointMatches
,
609 final int imageWidth
,
610 final int imageHeight
)
613 for ( final PointMatchCollectionAndAffine pma
: pointMatches
)
614 if ( pma
.pointMatches
.size() > 10 )
615 wholeCount
+= pma
.pointMatches
.size();
617 final double[][] tp
= new double[ wholeCount
][ 6 ];
618 final double h1
[][] = new double[ wholeCount
][ 2 ];
619 final double h2
[][] = new double[ wholeCount
][ 2 ];
621 for ( final PointMatchCollectionAndAffine pma
: pointMatches
)
623 if ( pma
.pointMatches
.size() > 10 )
626 for ( final PointMatch match
: pma
.pointMatches
)
628 final float[] tmp1
= match
.getP1().getL();
629 final float[] tmp2
= match
.getP2().getL();
631 h1
[ count
] = new double[]{ ( double ) tmp1
[ 0 ], ( double ) tmp1
[ 1 ] };
632 h2
[ count
] = new double[]{ ( double ) tmp2
[ 0 ], ( double ) tmp2
[ 1 ] };
634 pma
.affine
.getMatrix( tp
[ count
] );
642 NonLinearTransform nlt
= distortionCorrection(h1
, h2
, tp
, dimension
, lambda
, imageWidth
, imageHeight
);
643 nlt
.inverseTransform( h1
);
648 static protected NonLinearTransform
distortionCorrection( double hack1
[][], double hack2
[][], double transformParams
[][], int dimension
, double lambda
, int w
, int h
)
650 IJ
.showStatus( "Getting the Distortion Field" );
651 NonLinearTransform nlt
= new NonLinearTransform( dimension
, w
, h
);
652 nlt
.estimateDistortion( hack1
, hack2
, transformParams
, lambda
, w
, h
);
654 nlt
.inverseTransform( hack1
);
658 protected String
correctImages()
660 if ( !sp
.applyCorrection
)
662 sp
.target_dir
= System
.getProperty( "user.dir" ).replace( '\\', '/' ) + "/distCorr_tmp/";
663 System
.out
.println( "Tmp target directory: " + sp
.target_dir
);
665 if ( new File( sp
.target_dir
).exists() )
667 System
.out
.println( "removing old tmp directory!" );
669 final String
[] filesToDelete
= new File( sp
.target_dir
).list();
670 for ( int i
= 0; i
< filesToDelete
.length
; i
++ )
672 System
.out
.println( filesToDelete
[ i
] );
673 boolean deleted
= new File( sp
.target_dir
+ filesToDelete
[ i
] ).delete();
674 if ( !deleted
) IJ
.log( "Error: Could not remove temporary directory!" );
676 new File( sp
.target_dir
).delete();
680 // Create one directory
681 boolean success
= ( new File( sp
.target_dir
) ).mkdir();
682 if ( success
) new File( sp
.target_dir
).deleteOnExit();
684 catch ( Exception e
)
686 IJ
.showMessage( "Error! Could not create temporary directory. " + e
.getMessage() );
689 if ( sp
.target_dir
== "" || null == sp
.target_dir
)
691 final DirectoryChooser dc
= new DirectoryChooser( "Target Directory" );
692 sp
.target_dir
= dc
.getDirectory();
693 if ( null == sp
.target_dir
) return null;
694 sp
.target_dir
= sp
.target_dir
.replace( '\\', '/' );
695 if ( !sp
.target_dir
.endsWith( "/" ) ) sp
.target_dir
+= "/";
698 final String
[] namesTarget
= new File( sp
.target_dir
).list( new FilenameFilter()
700 public boolean accept( File dir
, String namesTarget
)
702 int idot
= namesTarget
.lastIndexOf( '.' );
703 if ( -1 == idot
) return false;
704 return namesTarget
.contains( namesTarget
.substring( idot
).toLowerCase() );
708 if ( namesTarget
.length
> 0 ) IJ
.showMessage( "Overwrite Message", "There are already images in that directory. These will be used for evaluation." );
712 IJ
.showStatus( "Correcting Images" );
714 final Thread
[] threads
= MultiThreading
.newThreads();
715 final AtomicInteger ai
= new AtomicInteger( sp
.applyCorrection ?
0 : sp
.firstImageIndex
);
717 for ( int ithread
= 0; ithread
< threads
.length
; ++ithread
)
719 threads
[ ithread
] = new Thread()
723 setPriority( Thread
.NORM_PRIORITY
);
725 for ( int i
= ai
.getAndIncrement(); i
< ( sp
.applyCorrection ? sp
.names
.length
: ( sp
.firstImageIndex
+ sp
.numberOfImages
) ); i
= ai
.getAndIncrement() )
727 IJ
.log( "Correcting image " + sp
.names
[ i
] );
728 final ImagePlus imps
= new Opener().openImage( sp
.source_dir
+ sp
.names
[ i
] );
729 imps
.setProcessor( imps
.getTitle(), imps
.getProcessor().convertToShort( false ) );
730 ImageProcessor
[] transErg
= nlt
.transform( imps
.getProcessor() );
731 imps
.setProcessor( imps
.getTitle(), transErg
[ 0 ] );
732 if ( !sp
.applyCorrection
) new File( sp
.target_dir
+ sp
.names
[ i
] ).deleteOnExit();
733 new FileSaver( imps
).saveAsTiff( sp
.target_dir
+ sp
.names
[ i
] );
738 MultiThreading
.startAndJoin( threads
);
740 return sp
.target_dir
;
743 protected double[] evaluateCorrectionXcorr( int index
, String directory
)
745 ImagePlus im1
= new Opener().openImage( directory
+ sp
.names
[ index
] );
746 im1
.setProcessor( sp
.names
[ index
], im1
.getProcessor().convertToShort( false ) );
749 ArrayList
< Double
> xcorrVals
= new ArrayList
< Double
>();
750 ArrayList
< Double
> xcorrValsGrad
= new ArrayList
< Double
>();
752 for ( int i
= 0; i
< sp
.numberOfImages
; i
++ )
758 if ( models
[ index
* ( sp
.numberOfImages
- 1 ) + count
] == null )
764 ImagePlus newImg
= new Opener().openImage( directory
+ sp
.names
[ i
+ sp
.firstImageIndex
] );
765 newImg
.setProcessor( newImg
.getTitle(), newImg
.getProcessor().convertToShort( false ) );
767 newImg
.setProcessor( sp
.names
[ i
+ sp
.firstImageIndex
], applyTransformToImageInverse( models
[ index
* ( sp
.numberOfImages
- 1 ) + count
], newImg
.getProcessor() ) );
769 // If you want to see the stitching improvement run this
770 // ImageProcessor testIp = im1.getProcessor().duplicate();
772 // for ( int x=0; x < testIp.getWidth(); x++){
773 // for (int y=0; y < testIp.getHeight(); y++){
774 // testIp.set(x, y, Math.abs(im1.getProcessor().get(x,y) -
775 // newImg.getProcessor().get(x,y)));
779 // ImagePlus testImg = new ImagePlus(sp.names[index] + " minus " +
780 // sp.names[i], testIp);
785 xcorrVals
.add( getXcorrBlackOut( im1
.getProcessor(), newImg
.getProcessor() ) );
787 xcorrValsGrad
.add( getXcorrBlackOutGradient( im1
.getProcessor(), newImg
.getProcessor() ) );
791 Collections
.sort( xcorrVals
);
792 Collections
.sort( xcorrValsGrad
);
794 // double[] medians = { xcorrVals.get( xcorrVals.size() / 2 ), xcorrValsGrad.get( xcorrValsGrad.size() / 2 ) };
796 double m1
= 0.0, m2
= 0.0;
797 for ( int i
= 0; i
< xcorrVals
.size(); i
++ )
799 m1
+= xcorrVals
.get( i
);
800 m2
+= xcorrValsGrad
.get( i
);
803 m1
/= xcorrVals
.size();
804 m2
/= xcorrVals
.size();
806 double[] means
= { m1
, m2
};
812 ImageProcessor
applyTransformToImageInverse(
813 AbstractAffineModel2D
< ?
> a
, ImageProcessor ip
){
814 ImageProcessor newIp
= ip
.duplicate();
817 for (int x
=0; x
<ip
.getWidth(); x
++){
818 for (int y
=0; y
<ip
.getHeight(); y
++){
819 float[] position
= {(float)x
,(float)y
};
820 // float[] newPosition = a.apply(position);
821 float[] newPosition
= {0,0,};
824 newPosition
= a
.applyInverse(position
);
826 catch ( NoninvertibleModelException e
) {}
828 int xn
= (int) newPosition
[0];
829 int yn
= (int) newPosition
[1];
831 if ( (xn
>= 0) && (yn
>= 0) && (xn
< ip
.getWidth()) && (yn
< ip
.getHeight()))
832 newIp
.set(xn
,yn
,ip
.get(x
,y
));
839 double getXcorrBlackOutGradient(ImageProcessor ip1
, ImageProcessor ip2
){
840 ImageProcessor ip1g
= getGradientSobel(ip1
);
841 ImageProcessor ip2g
= getGradientSobel(ip2
);
843 return getXcorrBlackOut(ip1g
, ip2g
);
846 //this blends out gradients that include black pixels to make the sharp border caused
847 //by the nonlinear transformation not disturb the gradient comparison
848 //FIXME: this should be handled by a mask image!
849 ImageProcessor
getGradientSobel(ImageProcessor ip
){
850 ImageProcessor ipGrad
= ip
.duplicate();
853 for (int i
=1; i
<ipGrad
.getWidth()-1; i
++){
854 for (int j
=1; j
<ipGrad
.getHeight()-1; j
++){
855 if(ip
.get(i
-1,j
-1)==0 || ip
.get(i
-1,j
)==0 || ip
.get(i
-1,j
+1)==0 ||
856 ip
.get(i
,j
-1)==0 || ip
.get(i
,j
)==0 || ip
.get(i
,j
+1)==0 ||
857 ip
.get(i
+1,j
-1)==0 || ip
.get(i
+1,j
)==0 || ip
.get(i
+1,j
+1)==0 )
860 double gradX
= (double) -ip
.get(i
-1, j
-1) - 2* ip
.get(i
-1,j
) - ip
.get(i
-1,j
+1)
861 +ip
.get(i
+1, j
-1) + 2* ip
.get(i
+1,j
) + ip
.get(i
+1,j
+1);
863 double gradY
= (double) -ip
.get(i
-1, j
-1) - 2* ip
.get(i
,j
-1) - ip
.get(i
+1,j
-1)
864 +ip
.get(i
-1, j
+1) + 2* ip
.get(i
,j
+1) + ip
.get(i
+1,j
+1);
866 double mag
= Math
.sqrt(gradX
*gradX
+ gradY
*gradY
);
867 ipGrad
.setf(i
,j
,(float) mag
);
874 //tested the result against matlab routine, this worked fine
875 static double getXcorrBlackOut(ImageProcessor ip1
, ImageProcessor ip2
){
877 ip1
= ip1
.convertToFloat();
878 ip2
= ip2
.convertToFloat();
880 //If this is not done, the black area from the transformed image influences xcorr result
881 //better alternative would be to use mask images and only calculate xcorr of
882 //the region present in both images.
883 for (int i
=0; i
<ip1
.getWidth(); i
++){
884 for (int j
=0; j
<ip1
.getHeight(); j
++){
885 if (ip1
.get(i
,j
) == 0)
887 if (ip2
.get(i
,j
) == 0)
892 // FloatProcessor ip1f = (FloatProcessor)ip1.convertToFloat();
893 // FloatProcessor ip2f = (FloatProcessor)ip2.convertToFloat();
895 float[] data1
= ( float[] )ip1
.getPixels();
896 float[] data2
= ( float[] )ip2
.getPixels();
898 double[] data1b
= new double[data1
.length
];
899 double[] data2b
= new double[data2
.length
];
902 double mean1
= 0.0, mean2
= 0.0;
904 for (int i
=0; i
< data1
.length
; i
++){
905 //if ((data1[i] == 0) || (data2[i] == 0))
907 data1b
[i
] = data1
[i
];
908 data2b
[i
] = data2
[i
];
914 mean1
/= (double) count
;
915 mean2
/= (double) count
;
917 double L2_1
= 0.0, L2_2
= 0.0;
918 for (int i
=0; i
< count
; i
++){
919 L2_1
+= (data1b
[i
] - mean1
) * (data1b
[i
] - mean1
);
920 L2_2
+= (data2b
[i
] - mean2
) * (data2b
[i
] - mean2
);
923 L2_1
= Math
.sqrt(L2_1
);
924 L2_2
= Math
.sqrt(L2_2
);
927 for (int i
=0; i
< count
; i
++){
928 xcorr
+= ((data1b
[i
]-mean1
) / L2_1
) * ((data2b
[i
]-mean2
) / L2_2
);
931 //System.out.println("XcorrVal: " + xcorr);
936 void visualizePoints( List
< List
< PointMatch
> > inliers
)
938 ColorProcessor ip
= new ColorProcessor(nlt
.getWidth(), nlt
.getHeight());
939 ip
.setColor(Color
.red
);
942 for (int i
=0; i
< inliers
.size(); i
++){
943 for (int j
=0; j
< inliers
.get(i
).size(); j
++){
944 float[] tmp1
= inliers
.get(i
).get(j
).getP1().getW();
945 float[] tmp2
= inliers
.get(i
).get(j
).getP2().getL();
946 ip
.setColor(Color
.red
);
947 ip
.drawDot((int) tmp2
[0], (int) tmp2
[1]);
948 ip
.setColor(Color
.blue
);
949 ip
.drawDot((int) tmp1
[0], (int) tmp1
[1]);
953 ImagePlus points
= new ImagePlus("Corresponding Points after correction", ip
);
957 public void getTransform(double[][] points1
, double[][] points2
, double[][] transformParams
){
958 double[][] p1
= new double[points1
.length
][3];
959 double[][] p2
= new double[points2
.length
][3];
961 for (int i
=0; i
< points1
.length
; i
++){
962 p1
[i
][0] = points1
[i
][0];
963 p1
[i
][1] = points1
[i
][1];
966 p2
[i
][0] = points2
[i
][0];
967 p2
[i
][1] = points2
[i
][1];
972 Matrix s1
= new Matrix(p1
);
973 Matrix s2
= new Matrix(p2
);
974 Matrix t
= (s1
.transpose().times(s1
)).inverse().times(s1
.transpose()).times(s2
);
976 for (int i
=0; i
< transformParams
.length
; i
++){
977 if (transformParams
[i
][0] == -10){
978 transformParams
[i
][0] = t
.get(0,0);
979 transformParams
[i
][1] = t
.get(0,1);
980 transformParams
[i
][2] = t
.get(1,0);
981 transformParams
[i
][3] = t
.get(1,1);
982 transformParams
[i
][4] = t
.get(2,0);
983 transformParams
[i
][5] = t
.get(2,1);
993 static List
< Feature
>[] extractSIFTFeaturesThreaded(
994 final int numberOfImages
, final String directory
,
995 final String
[] names
){
996 //extract all SIFT Features
998 final List
< Feature
>[] siftFeatures
= new ArrayList
[numberOfImages
];
999 final Thread
[] threads
= MultiThreading
.newThreads();
1000 final AtomicInteger ai
= new AtomicInteger(0); // start at second slice
1002 IJ
.showStatus("Extracting SIFT Features");
1003 for (int ithread
= 0; ithread
< threads
.length
; ++ithread
) {
1004 threads
[ithread
] = new Thread() {
1006 for (int i
= ai
.getAndIncrement(); i
< numberOfImages
; i
= ai
.getAndIncrement())
1008 final ArrayList
< Feature
> fs
= new ArrayList
< Feature
>();
1009 ImagePlus imps
= new Opener().openImage(directory
+ names
[i
+ sp
.firstImageIndex
]);
1010 imps
.setProcessor(imps
.getTitle(), imps
.getProcessor().convertToFloat());
1012 FloatArray2DSIFT sift
= new FloatArray2DSIFT( sp
.sift
.clone() );
1013 SIFT ijSIFT
= new SIFT( sift
);
1015 ijSIFT
.extractFeatures( imps
.getProcessor(), fs
);
1017 Collections
.sort( fs
);
1018 IJ
.log("Extracting SIFT of image: "+i
);
1026 MultiThreading
.startAndJoin(threads
);
1028 return siftFeatures
;
1031 static protected void extractSIFTPointsThreaded(
1033 final List
< Feature
>[] siftFeatures
,
1034 final List
< PointMatch
>[] inliers
,
1035 final AbstractAffineModel2D
< ?
>[] models
)
1038 // save all matching candidates
1039 final List
< PointMatch
>[] candidates
= new List
[ siftFeatures
.length
- 1 ];
1041 final Thread
[] threads
= MultiThreading
.newThreads();
1042 final AtomicInteger ai
= new AtomicInteger( 0 ); // start at second
1045 for ( int ithread
= 0; ithread
< threads
.length
; ++ithread
)
1047 threads
[ ithread
] = new Thread()
1051 setPriority( Thread
.NORM_PRIORITY
);
1053 for ( int j
= ai
.getAndIncrement(); j
< candidates
.length
; j
= ai
.getAndIncrement() )
1055 int i
= ( j
< index ? j
: j
+ 1 );
1056 candidates
[ j
] = FloatArray2DSIFT
.createMatches( siftFeatures
[ index
], siftFeatures
[ i
], 1.5f
, null, Float
.MAX_VALUE
, 0.5f
);
1062 MultiThreading
.startAndJoin( threads
);
1064 // get rid of the outliers and save the rigid transformations to match
1067 final AtomicInteger ai2
= new AtomicInteger( 0 );
1068 for ( int ithread
= 0; ithread
< threads
.length
; ++ithread
)
1070 threads
[ ithread
] = new Thread()
1074 setPriority( Thread
.NORM_PRIORITY
);
1075 for ( int i
= ai2
.getAndIncrement(); i
< candidates
.length
; i
= ai2
.getAndIncrement() )
1078 List
< PointMatch
> tmpInliers
= new ArrayList
< PointMatch
>();
1080 // RigidModel2D.estimateBestModel(candidates.get(i),
1081 // tmpInliers, sp.min_epsilon, sp.max_epsilon,
1082 // sp.min_inlier_ratio);
1084 final AbstractAffineModel2D
< ?
> m
;
1085 switch ( sp
.expectedModelIndex
)
1088 m
= new TranslationModel2D();
1091 m
= new RigidModel2D();
1094 m
= new SimilarityModel2D();
1097 m
= new AffineModel2D();
1103 boolean modelFound
= false;
1106 modelFound
= m
.filterRansac( candidates
[ i
], tmpInliers
, 1000, sp
.maxEpsilon
, sp
.minInlierRatio
, 10 );
1108 catch ( NotEnoughDataPointsException e
)
1114 IJ
.log( "Model found:\n " + candidates
[ i
].size() + " candidates\n " + tmpInliers
.size() + " inliers\n " + String
.format( "%.2f", m
.getCost() ) + "px average displacement" );
1116 IJ
.log( "No Model found." );
1118 inliers
[ index
* ( sp
.numberOfImages
- 1 ) + i
] = tmpInliers
;
1119 models
[ index
* ( sp
.numberOfImages
- 1 ) + i
] = m
;
1120 // System.out.println("**** MODEL ADDED: " +
1121 // (index*(sp.numberOfImages-1)+i));
1127 MultiThreading
.startAndJoin( threads
);