lenscorrection/Distortion_Correction.java
/**

ImageJ plugin
Copyright (C) 2008 Verena Kaynig.

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation (http://www.gnu.org/licenses/gpl.txt )

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
**/
/* ****************************************************************************
 * This ImageJ plugin estimates a non-linear lens distortion from the given
 * images and corrects all images with the same transformation.
 *
 * TODO:
 *   - show a histogram of beta coefficients to evaluate the needed kernel dimension
 *   - show SIFT matches before and after correction?
 *   - report numerical xcorr results in a better manner
 *   - show a visual impression of the stitching?
 *   - DOCUMENTATION
 *   - handle mask images properly to incorporate into TrakEM
 *
 * Author: Verena Kaynig
 * Contact: verena.kaynig@inf.ethz.ch
 *
 * SIFT implementation: Stephan Saalfeld
 * **************************************************************************** */
package lenscorrection;

import ij.IJ;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;
import ij.ImagePlus;
import ij.io.DirectoryChooser;
import ij.io.Opener;
import ij.io.FileSaver;

import mpi.fruitfly.general.MultiThreading;
import mpicbg.models.*;
import mpicbg.ij.SIFT;
import mpicbg.imagefeatures.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.awt.Color;
import java.awt.geom.AffineTransform;
import java.io.*;

import org.jfree.chart.*;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.data.category.DefaultCategoryDataset;

import Jama.Matrix;
public class Distortion_Correction implements PlugIn{

	static public class PointMatchCollectionAndAffine
	{
		final public AffineTransform affine;
		final public Collection< PointMatch > pointMatches;

		public PointMatchCollectionAndAffine( final AffineTransform affine, final Collection< PointMatch > pointMatches )
		{
			this.affine = affine;
			this.pointMatches = pointMatches;
		}
	}
	static public class BasicParam
	{
		final public FloatArray2DSIFT.Param sift = new FloatArray2DSIFT.Param();

		/**
		 * Closest/next closest neighbour distance ratio
		 */
		public float rod = 0.92f;

		/**
		 * Maximal allowed alignment error in px
		 */
		public float maxEpsilon = 32.0f;

		/**
		 * Inlier/candidates ratio
		 */
		public float minInlierRatio = 0.2f;

		/**
		 * Implemented transformation models for choice
		 */
		final static public String[] modelStrings = new String[]{ "Translation", "Rigid", "Similarity", "Affine" };
		public int expectedModelIndex = 1;

		/**
		 * Order of the polynomial kernel
		 */
		public int dimension = 5;

		/**
		 * Regularization factor
		 */
		public double lambda = 0.01;

		public BasicParam()
		{
			sift.fdSize = 8;
			sift.maxOctaveSize = 1600;
			sift.minOctaveSize = 400;
		}

		public void addFields( final GenericDialog gd )
		{
			SIFT.addFields( gd, sift );

			gd.addNumericField( "closest/next_closest_ratio :", rod, 2 );

			gd.addMessage( "Geometric Consensus Filter :" );
			gd.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
			gd.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
			gd.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );

			gd.addMessage( "Lens Model :" );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
		}

		public boolean readFields( final GenericDialog gd )
		{
			SIFT.readFields( gd, sift );

			rod = ( float )gd.getNextNumber();

			maxEpsilon = ( float )gd.getNextNumber();
			minInlierRatio = ( float )gd.getNextNumber();
			expectedModelIndex = gd.getNextChoiceIndex();

			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();

			return !gd.invalidNumber();
		}

		public boolean setup( final String title )
		{
			final GenericDialog gd = new GenericDialog( title );
			addFields( gd );
			do
			{
				gd.showDialog();
				if ( gd.wasCanceled() ) return false;
			}
			while ( !readFields( gd ) );

			return true;
		}

		@Override
		public BasicParam clone()
		{
			final BasicParam p = new BasicParam();
			p.sift.set( sift );
			p.dimension = dimension;
			p.expectedModelIndex = expectedModelIndex;
			p.lambda = lambda;
			p.maxEpsilon = maxEpsilon;
			p.minInlierRatio = minInlierRatio;
			p.rod = rod;

			return p;
		}
	}
	static public class PluginParam extends BasicParam
	{
		public String source_dir = "";
		public String target_dir = "";
		public String saveFileName = "distCorr.txt";

		public boolean applyCorrection = true;
		public boolean visualizeResults = true;

		public int numberOfImages = 9;
		public int firstImageIndex = 0;

		/**
		 * Original and calibrated images are supposed to have the same names,
		 * but are in different directories
		 */
		public String[] names;

		public int saveOrLoad = 0;

		@Override
		public void addFields( final GenericDialog gd )
		{
			super.addFields( gd );
		}

		@Override
		public boolean readFields( final GenericDialog gd )
		{
			super.readFields( gd );

			return !gd.invalidNumber();
		}

		/**
		 * Setup as a three step dialog.
		 */
		@Override
		public boolean setup( final String title )
		{
			source_dir = "";
			while ( source_dir.isEmpty() )
			{
				final DirectoryChooser dc = new DirectoryChooser( "Calibration Images" );
				source_dir = dc.getDirectory();
				if ( null == source_dir ) return false;

				source_dir = source_dir.replace( '\\', '/' );
				if ( !source_dir.endsWith( "/" ) ) source_dir += "/";
			}

			final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
			names = new File( source_dir ).list(
					new FilenameFilter()
					{
						public boolean accept( File dir, String name )
						{
							int idot = name.lastIndexOf( '.' );
							if ( -1 == idot ) return false;
							return exts.contains( name.substring( idot ).toLowerCase() );
						}
					} );
			Arrays.sort( names );

			final GenericDialog gd = new GenericDialog( title );

			gd.addNumericField( "number_of_images :", 9, 0 );
			gd.addChoice( "first_image :", names, names[ 0 ] );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
			gd.addCheckbox( "apply_correction_to_images", applyCorrection );
			gd.addCheckbox( "visualize results", visualizeResults );
			final String[] options = new String[]{ "save", "load" };
			gd.addChoice( "What to do? ", options, options[ saveOrLoad ] );
			gd.addStringField( "file_name: ", saveFileName );
			gd.showDialog();

			if ( gd.wasCanceled() ) return false;

			numberOfImages = ( int )gd.getNextNumber();
			firstImageIndex = gd.getNextChoiceIndex();
			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();
			applyCorrection = gd.getNextBoolean();
			visualizeResults = gd.getNextBoolean();
			saveOrLoad = gd.getNextChoiceIndex();
			saveFileName = gd.getNextString();

			if ( saveOrLoad == 0 || visualizeResults )
			{
				final GenericDialog gds = new GenericDialog( title );
				SIFT.addFields( gds, sift );

				gds.addNumericField( "closest/next_closest_ratio :", rod, 2 );

				gds.addMessage( "Geometric Consensus Filter:" );
				gds.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
				gds.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
				gds.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );

				gds.showDialog();
				if ( gds.wasCanceled() ) return false;

				SIFT.readFields( gds, sift );

				rod = ( float )gds.getNextNumber();

				maxEpsilon = ( float )gds.getNextNumber();
				minInlierRatio = ( float )gds.getNextNumber();
				expectedModelIndex = gds.getNextChoiceIndex();

				return !( gd.invalidNumber() || gds.invalidNumber() );
			}

			return !gd.invalidNumber();
		}
	}
	static final public BasicParam p = new BasicParam();
	static final public PluginParam sp = new PluginParam();

	NonLinearTransform nlt = new NonLinearTransform();
	AbstractAffineModel2D< ? >[] models;
	public void run( String arg )
	{
		if ( !sp.setup( "Lens Correction" ) ) return;

		List< List< PointMatch > > inliers = null;
		if ( sp.saveOrLoad == 0 ) {

			//IJ.log( sp.source_dir + sp.names[ 0 ] );
			final ImagePlus imgTmp = new Opener().openImage( sp.source_dir + sp.names[ 0 ] );
			final int imageWidth = imgTmp.getWidth(), imageHeight = imgTmp.getHeight();
			/* imgTmp was just needed to get width and height of the images */
			imgTmp.flush();

			List< Feature >[] siftFeatures = extractSIFTFeaturesThreaded( sp.numberOfImages, sp.source_dir, sp.names );

			List< PointMatch >[] inliersTmp = new ArrayList[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];
			models = new AbstractAffineModel2D[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];

			IJ.showStatus( "Estimating Correspondences" );
			for ( int i = 0; i < sp.numberOfImages; ++i )
			{
				IJ.log( "Estimating correspondences of image " + i );
				IJ.showProgress( ( i + 1 ), sp.numberOfImages );
				extractSIFTPointsThreaded( i, siftFeatures, inliersTmp, models );
			}

			int wholeCount = 0;
			inliers = new ArrayList< List< PointMatch > >();
			for ( int i = 0; i < inliersTmp.length; i++ )
			{
				// if the list at inliersTmp[i] contains only one null element,
				// its size is still 1
				if ( inliersTmp[i].size() > 10 ){
					wholeCount += inliersTmp[i].size();
				}
				// every list is added, even small ones, so that the indices of
				// inliers keep corresponding to the model positions in models
				inliers.add( inliersTmp[i] );
			}

			//this is really really ugly
			double[][] tp = new double[wholeCount][6];
			double h1[][] = new double[wholeCount][2];
			double h2[][] = new double[wholeCount][2];
			int count = 0;
			for ( int i = 0; i < inliers.size(); ++i )
			{
				if ( inliers.get(i).size() > 10 )
				{
					double[][] points1 = new double[inliers.get(i).size()][2];
					double[][] points2 = new double[inliers.get(i).size()][2];

					for ( int j = 0; j < inliers.get(i).size(); j++ ){

						float[] tmp1 = ((PointMatch) inliers.get(i).get(j)).getP1().getL();
						float[] tmp2 = ((PointMatch) inliers.get(i).get(j)).getP2().getL();

						points1[j][0] = (double) tmp1[0];
						points1[j][1] = (double) tmp1[1];
						points2[j][0] = (double) tmp2[0];
						points2[j][1] = (double) tmp2[1];

						h1[count] = new double[] {(double) tmp1[0], (double) tmp1[1]};
						h2[count] = new double[] {(double) tmp2[0], (double) tmp2[1]};

						models[i].createAffine().getMatrix(tp[count]);
						count++;
					}
				}
			}

			//if ( sp.saveOrLoad == 0 )
			nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
			nlt.visualizeSmall( sp.lambda );

			while ( true )
			{
				final GenericDialog gdl = new GenericDialog( "New lambda?" );
				gdl.addMessage( "If the distortion field shows a clear translation, \n it is likely that you need to increase lambda." );
				gdl.addNumericField( "lambda :", sp.lambda, 6 );
				gdl.showDialog();
				if ( gdl.wasCanceled() ) break;
				sp.lambda = gdl.getNextNumber();
				nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
				nlt.visualizeSmall( sp.lambda );
			}
			nlt.save( sp.source_dir + sp.saveFileName );
		}

		//after all preprocessing is done, estimate the distortion correction transform
		if ( sp.saveOrLoad == 1 )
		{
			nlt.load( sp.source_dir + sp.saveFileName );
			nlt.print();
		}

		if ( sp.applyCorrection || sp.visualizeResults )
		{
			sp.target_dir = correctImages();
		}

		if ( sp.visualizeResults && ( sp.saveOrLoad == 0 ) )
		{
			IJ.log( "call nlt.visualize()" );
			nlt.visualize();
			if ( null != sp.target_dir )
			{
				IJ.log( "Evaluating distortion correction..." );
				evaluateCorrection( inliers );
			}
		}

		IJ.log( " Done " );
	}
	public void evaluateCorrection( List< List< PointMatch > > inliers )
	{
		IJ.showStatus( "Evaluating Distortion Correction" );
		double[][] original = new double[ sp.numberOfImages ][ 2 ];
		double[][] corrected = new double[ sp.numberOfImages ][ 2 ];

		for ( int i = sp.firstImageIndex; i < sp.numberOfImages; i++ )
		{
			original[ i ] = evaluateCorrectionXcorr( i, sp.source_dir );
			corrected[ i ] = evaluateCorrectionXcorr( i, sp.target_dir );
		}
		DefaultCategoryDataset dataset = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGain = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGrad = new DefaultCategoryDataset();

		for ( int i = 0; i < original.length; ++i )
		{
			dataset.setValue(Math.abs(original[i][0]), "before", "image" + i);
			dataset.setValue(Math.abs(corrected[i][0]), "after", "image" + i);
			datasetGrad.setValue(Math.abs(original[i][1]), "before", "image" + i);
			datasetGrad.setValue(Math.abs(corrected[i][1]), "after", "image" + i);

			datasetGain.setValue(Math.abs(corrected[i][0]) - Math.abs(original[i][0]), "gray", "image" + i);
			datasetGain.setValue(Math.abs(corrected[i][1]) - Math.abs(original[i][1]), "grad", "image" + i);
		}

		final JFreeChart chart = ChartFactory.createBarChart(
			"Xcorr before and after correction",
			"ImageNumber",
			"Xcorr",
			dataset,
			PlotOrientation.VERTICAL,
			false,
			true, false );
		final ImagePlus imp = new ImagePlus( "Xcorr before and after correction Plot", chart.createBufferedImage( 500, 300 ) );
		imp.show();

		final JFreeChart chartGrad = ChartFactory.createBarChart(
			"XcorrGradient before and after correction",
			"ImageNumber",
			"Xcorr",
			datasetGrad,
			PlotOrientation.VERTICAL,
			false,
			true, false );
		final ImagePlus impGrad = new ImagePlus( "XcorrGradient before and after correction Plot", chartGrad.createBufferedImage( 500, 300 ) );
		impGrad.show();

		final JFreeChart chartGain = ChartFactory.createBarChart(
			"Gain in Xcorr",
			"ImageNumber",
			"Xcorr",
			datasetGain,
			PlotOrientation.VERTICAL,
			false,
			true, false );
		final ImagePlus impGain = new ImagePlus( "Gain in Xcorr Plot", chartGain.createBufferedImage( 500, 300 ) );
		impGain.show();

		visualizePoints( inliers );

		//write xcorr data to file
		String original0 = "", original1 = "", corrected0 = "", corrected1 = "", gain0 = "", gain1 = "";
		for ( int i = 0; i < original.length; ++i )
		{
			original0 = original0 + Double.toString(original[i][0]) + "; ";
			original1 = original1 + Double.toString(original[i][1]) + "; ";
			corrected0 = corrected0 + Double.toString(corrected[i][0]) + "; ";
			corrected1 = corrected1 + Double.toString(corrected[i][1]) + "; ";
			gain0 = gain0 + Double.toString(Math.abs(corrected[i][0]) - Math.abs(original[i][0])) + "; ";
			gain1 = gain1 + Double.toString(Math.abs(corrected[i][1]) - Math.abs(original[i][1])) + "; ";
		}

		try
		{
			BufferedWriter out = new BufferedWriter( new FileWriter( sp.source_dir + "xcorrData.log" ) );

			out.write( original0 );
			out.newLine();
			out.newLine();
			out.write( original1 );
			out.newLine();
			out.newLine();
			out.write( corrected0 );
			out.newLine();
			out.newLine();
			out.write( corrected1 );
			out.newLine();
			out.newLine();
			out.write( gain0 );
			out.newLine();
			out.newLine();
			out.write( gain1 );
			out.newLine();
			out.close();
		}
		catch ( Exception e ){ System.err.println( "Error: " + e.getMessage() ); }
	}
	protected void extractSIFTPoints(
		int index,
		List< Feature >[] siftFeatures,
		List< List< PointMatch > > inliers,
		List< AbstractAffineModel2D< ? > > models )
	{
		//save all matching candidates
		List< List< PointMatch > > candidates = new ArrayList< List< PointMatch > >();

		for ( int j = 0; j < siftFeatures.length; j++ )
		{
			if ( index == j ) continue;
			candidates.add( FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ j ], 1.5f, null, Float.MAX_VALUE, 0.5f ) );
		}

		//get rid of the outliers and save the transformations to match the inliers
		for ( int i = 0; i < candidates.size(); ++i )
		{
			List< PointMatch > tmpInliers = new ArrayList< PointMatch >();

			final AbstractAffineModel2D< ? > m;
			switch ( sp.expectedModelIndex )
			{
			case 0:
				m = new TranslationModel2D();
				break;
			case 1:
				m = new RigidModel2D();
				break;
			case 2:
				m = new SimilarityModel2D();
				break;
			case 3:
				m = new AffineModel2D();
				break;
			default:
				return;
			}

			try{
				m.filterRansac(
						candidates.get( i ),
						tmpInliers,
						1000,
						sp.maxEpsilon,
						sp.minInlierRatio,
						10 );
			}
			catch( NotEnoughDataPointsException e ) { e.printStackTrace(); }

			inliers.add( tmpInliers );
			models.add( m );
		}
	}
	/**
	 * Estimates a polynomial distortion model for a set of
	 * {@linkplain PointMatch corresponding points} and returns the inverse of
	 * this model which then may be used to undo the distortion.
	 *
	 * @param pointMatches
	 *            a collection of point match collections, each paired with the
	 *            affine that transfers its matches into a common image frame
	 * @param dimension the order of the polynomial model
	 * @param lambda regularization factor
	 * @param imageWidth
	 * @param imageHeight
	 *
	 * @return a polynomial distortion model to undo the distortion
	 */
	static public NonLinearTransform createInverseDistortionModel(
		final Collection< PointMatchCollectionAndAffine > pointMatches,
		final int dimension,
		final double lambda,
		final int imageWidth,
		final int imageHeight )
	{
		int wholeCount = 0;
		for ( final PointMatchCollectionAndAffine pma : pointMatches )
			if ( pma.pointMatches.size() > 10 )
				wholeCount += pma.pointMatches.size();

		final double[][] tp = new double[ wholeCount ][ 6 ];
		final double h1[][] = new double[ wholeCount ][ 2 ];
		final double h2[][] = new double[ wholeCount ][ 2 ];
		int count = 0;
		for ( final PointMatchCollectionAndAffine pma : pointMatches )
		{
			if ( pma.pointMatches.size() > 10 )
			{
				int i = 0;
				for ( final PointMatch match : pma.pointMatches )
				{
					final float[] tmp1 = match.getP1().getL();
					final float[] tmp2 = match.getP2().getL();

					h1[ count ] = new double[]{ ( double ) tmp1[ 0 ], ( double ) tmp1[ 1 ] };
					h2[ count ] = new double[]{ ( double ) tmp2[ 0 ], ( double ) tmp2[ 1 ] };

					pma.affine.getMatrix( tp[ count ] );
					count++;

					++i;
				}
			}
		}

		NonLinearTransform nlt = distortionCorrection( h1, h2, tp, dimension, lambda, imageWidth, imageHeight );
		nlt.inverseTransform( h1 );

		return nlt;
	}
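	/*
	 * Usage sketch (illustrative only, not part of the original plugin; the
	 * variables pointMatches and imp are hypothetical placeholders): given a
	 * collection of PointMatchCollectionAndAffine built from matches between
	 * overlapping calibration images, the inverse model returned above can be
	 * applied to an image roughly like this:
	 *
	 *   final NonLinearTransform nlt = Distortion_Correction.createInverseDistortionModel(
	 *           pointMatches, 5, 0.01, imp.getWidth(), imp.getHeight() );
	 *   final ImageProcessor[] corrected = nlt.transform( imp.getProcessor() );
	 *   imp.setProcessor( imp.getTitle(), corrected[ 0 ] );
	 */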
	static protected NonLinearTransform distortionCorrection( double hack1[][], double hack2[][], double transformParams[][], int dimension, double lambda, int w, int h )
	{
		IJ.showStatus( "Getting the Distortion Field" );
		NonLinearTransform nlt = new NonLinearTransform( dimension, w, h );
		nlt.estimateDistortion( hack1, hack2, transformParams, lambda, w, h );

		nlt.inverseTransform( hack1 );
		return nlt;
	}
	protected String correctImages()
	{
		if ( !sp.applyCorrection )
		{
			sp.target_dir = System.getProperty( "user.dir" ).replace( '\\', '/' ) + "/distCorr_tmp/";
			System.out.println( "Tmp target directory: " + sp.target_dir );

			if ( new File( sp.target_dir ).exists() )
			{
				System.out.println( "removing old tmp directory!" );

				final String[] filesToDelete = new File( sp.target_dir ).list();
				for ( int i = 0; i < filesToDelete.length; i++ )
				{
					System.out.println( filesToDelete[ i ] );
					boolean deleted = new File( sp.target_dir + filesToDelete[ i ] ).delete();
					if ( !deleted ) IJ.log( "Error: Could not remove temporary directory!" );
				}
				new File( sp.target_dir ).delete();
			}
			try
			{
				// Create one directory
				boolean success = ( new File( sp.target_dir ) ).mkdir();
				if ( success ) new File( sp.target_dir ).deleteOnExit();
			}
			catch ( Exception e )
			{
				IJ.showMessage( "Error! Could not create temporary directory. " + e.getMessage() );
			}
		}
		if ( null == sp.target_dir || sp.target_dir.isEmpty() )
		{
			final DirectoryChooser dc = new DirectoryChooser( "Target Directory" );
			sp.target_dir = dc.getDirectory();
			if ( null == sp.target_dir ) return null;
			sp.target_dir = sp.target_dir.replace( '\\', '/' );
			if ( !sp.target_dir.endsWith( "/" ) ) sp.target_dir += "/";
		}

		// only file names with a known image extension are considered
		final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
		final String[] namesTarget = new File( sp.target_dir ).list( new FilenameFilter()
		{
			public boolean accept( File dir, String name )
			{
				int idot = name.lastIndexOf( '.' );
				if ( -1 == idot ) return false;
				return exts.contains( name.substring( idot ).toLowerCase() );
			}
		} );

		if ( namesTarget.length > 0 ) IJ.showMessage( "Overwrite Message", "There are already images in that directory. These will be used for evaluation." );
		else
		{
			IJ.showStatus( "Correcting Images" );

			final Thread[] threads = MultiThreading.newThreads();
			final AtomicInteger ai = new AtomicInteger( sp.applyCorrection ? 0 : sp.firstImageIndex );

			for ( int ithread = 0; ithread < threads.length; ++ithread )
			{
				threads[ ithread ] = new Thread()
				{
					public void run()
					{
						setPriority( Thread.NORM_PRIORITY );

						for ( int i = ai.getAndIncrement(); i < ( sp.applyCorrection ? sp.names.length : ( sp.firstImageIndex + sp.numberOfImages ) ); i = ai.getAndIncrement() )
						{
							IJ.log( "Correcting image " + sp.names[ i ] );
							final ImagePlus imps = new Opener().openImage( sp.source_dir + sp.names[ i ] );
							imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToShort( false ) );
							ImageProcessor[] transErg = nlt.transform( imps.getProcessor() );
							imps.setProcessor( imps.getTitle(), transErg[ 0 ] );
							if ( !sp.applyCorrection ) new File( sp.target_dir + sp.names[ i ] ).deleteOnExit();
							new FileSaver( imps ).saveAsTiff( sp.target_dir + sp.names[ i ] );
						}
					}
				};
			}
			MultiThreading.startAndJoin( threads );
		}
		return sp.target_dir;
	}
	protected double[] evaluateCorrectionXcorr( int index, String directory )
	{
		ImagePlus im1 = new Opener().openImage( directory + sp.names[ index ] );
		im1.setProcessor( sp.names[ index ], im1.getProcessor().convertToShort( false ) );

		int count = 0;
		ArrayList< Double > xcorrVals = new ArrayList< Double >();
		ArrayList< Double > xcorrValsGrad = new ArrayList< Double >();

		for ( int i = 0; i < sp.numberOfImages; i++ )
		{
			if ( i == index )
			{
				continue;
			}
			if ( models[ index * ( sp.numberOfImages - 1 ) + count ] == null )
			{
				count++;
				continue;
			}

			ImagePlus newImg = new Opener().openImage( directory + sp.names[ i + sp.firstImageIndex ] );
			newImg.setProcessor( newImg.getTitle(), newImg.getProcessor().convertToShort( false ) );

			newImg.setProcessor( sp.names[ i + sp.firstImageIndex ], applyTransformToImageInverse( models[ index * ( sp.numberOfImages - 1 ) + count ], newImg.getProcessor() ) );

			// If you want to see the stitching improvement run this
			// ImageProcessor testIp = im1.getProcessor().duplicate();
			// for ( int x=0; x < testIp.getWidth(); x++){
			//     for (int y=0; y < testIp.getHeight(); y++){
			//         testIp.set(x, y, Math.abs(im1.getProcessor().get(x,y) -
			//             newImg.getProcessor().get(x,y)));
			//     }
			// }
			// ImagePlus testImg = new ImagePlus(sp.names[index] + " minus " +
			//     sp.names[i], testIp);
			// testImg.show();
			// im1.show();
			// newImg.show();

			xcorrVals.add( getXcorrBlackOut( im1.getProcessor(), newImg.getProcessor() ) );

			xcorrValsGrad.add( getXcorrBlackOutGradient( im1.getProcessor(), newImg.getProcessor() ) );
			count++;
		}

		Collections.sort( xcorrVals );
		Collections.sort( xcorrValsGrad );

		// double[] medians = { xcorrVals.get( xcorrVals.size() / 2 ), xcorrValsGrad.get( xcorrValsGrad.size() / 2 ) };

		double m1 = 0.0, m2 = 0.0;
		for ( int i = 0; i < xcorrVals.size(); i++ )
		{
			m1 += xcorrVals.get( i );
			m2 += xcorrValsGrad.get( i );
		}

		m1 /= xcorrVals.size();
		m2 /= xcorrVals.size();

		double[] means = { m1, m2 };

		return means;
		//return medians;
	}
	ImageProcessor applyTransformToImageInverse(
		AbstractAffineModel2D< ? > a, ImageProcessor ip ){
		ImageProcessor newIp = ip.duplicate();
		newIp.max( 0.0 );

		for ( int x = 0; x < ip.getWidth(); x++ ){
			for ( int y = 0; y < ip.getHeight(); y++ ){
				float[] position = { (float) x, (float) y };
				// float[] newPosition = a.apply(position);
				float[] newPosition = { 0, 0 };
				try
				{
					newPosition = a.applyInverse( position );
				}
				catch ( NoninvertibleModelException e ) {}

				int xn = (int) newPosition[0];
				int yn = (int) newPosition[1];

				if ( (xn >= 0) && (yn >= 0) && (xn < ip.getWidth()) && (yn < ip.getHeight()) )
					newIp.set( xn, yn, ip.get( x, y ) );
			}
		}
		return newIp;
	}
	double getXcorrBlackOutGradient( ImageProcessor ip1, ImageProcessor ip2 ){
		ImageProcessor ip1g = getGradientSobel( ip1 );
		ImageProcessor ip2g = getGradientSobel( ip2 );

		return getXcorrBlackOut( ip1g, ip2g );
	}
	//This masks out gradients that involve black pixels, so that the sharp border caused
	//by the nonlinear transformation does not disturb the gradient comparison.
	//FIXME: this should be handled by a mask image!
	ImageProcessor getGradientSobel( ImageProcessor ip ){
		ImageProcessor ipGrad = ip.duplicate();
		ipGrad.max( 0.0 );

		for ( int i = 1; i < ipGrad.getWidth() - 1; i++ ){
			for ( int j = 1; j < ipGrad.getHeight() - 1; j++ ){
				if ( ip.get(i-1,j-1)==0 || ip.get(i-1,j)==0 || ip.get(i-1,j+1)==0 ||
						ip.get(i,j-1)==0 || ip.get(i,j)==0 || ip.get(i,j+1)==0 ||
						ip.get(i+1,j-1)==0 || ip.get(i+1,j)==0 || ip.get(i+1,j+1)==0 )
					continue;

				double gradX = (double) -ip.get(i-1, j-1) - 2* ip.get(i-1,j) - ip.get(i-1,j+1)
						+ip.get(i+1, j-1) + 2* ip.get(i+1,j) + ip.get(i+1,j+1);

				double gradY = (double) -ip.get(i-1, j-1) - 2* ip.get(i,j-1) - ip.get(i+1,j-1)
						+ip.get(i-1, j+1) + 2* ip.get(i,j+1) + ip.get(i+1,j+1);

				double mag = Math.sqrt( gradX*gradX + gradY*gradY );
				ipGrad.setf( i, j, (float) mag );
			}
		}
		return ipGrad;
	}
	// Computes the normalized cross correlation of the two images:
	//   xcorr = sum( (a_i - mean_a) * (b_i - mean_b) ) / ( ||a - mean_a|| * ||b - mean_b|| )
	// Tested the result against a Matlab routine; this worked fine.
	static double getXcorrBlackOut( ImageProcessor ip1, ImageProcessor ip2 ){

		ip1 = ip1.convertToFloat();
		ip2 = ip2.convertToFloat();

		//If this is not done, the black area from the transformed image influences the xcorr result.
		//A better alternative would be to use mask images and only calculate the xcorr of
		//the region present in both images.
		for ( int i = 0; i < ip1.getWidth(); i++ ){
			for ( int j = 0; j < ip1.getHeight(); j++ ){
				if ( ip1.get(i,j) == 0 )
					ip2.set( i, j, 0 );
				if ( ip2.get(i,j) == 0 )
					ip1.set( i, j, 0 );
			}
		}

		// FloatProcessor ip1f = (FloatProcessor)ip1.convertToFloat();
		// FloatProcessor ip2f = (FloatProcessor)ip2.convertToFloat();

		float[] data1 = ( float[] ) ip1.getPixels();
		float[] data2 = ( float[] ) ip2.getPixels();

		double[] data1b = new double[data1.length];
		double[] data2b = new double[data2.length];

		int count = 0;
		double mean1 = 0.0, mean2 = 0.0;

		for ( int i = 0; i < data1.length; i++ ){
			//if ((data1[i] == 0) || (data2[i] == 0))
			//	continue;
			data1b[i] = data1[i];
			data2b[i] = data2[i];
			mean1 += data1b[i];
			mean2 += data2b[i];
			count++;
		}

		mean1 /= (double) count;
		mean2 /= (double) count;

		double L2_1 = 0.0, L2_2 = 0.0;
		for ( int i = 0; i < count; i++ ){
			L2_1 += (data1b[i] - mean1) * (data1b[i] - mean1);
			L2_2 += (data2b[i] - mean2) * (data2b[i] - mean2);
		}

		L2_1 = Math.sqrt( L2_1 );
		L2_2 = Math.sqrt( L2_2 );

		double xcorr = 0.0;
		for ( int i = 0; i < count; i++ ){
			xcorr += ((data1b[i]-mean1) / L2_1) * ((data2b[i]-mean2) / L2_2);
		}

		//System.out.println("XcorrVal: " + xcorr);
		return xcorr;
	}
	void visualizePoints( List< List< PointMatch > > inliers )
	{
		ColorProcessor ip = new ColorProcessor( nlt.getWidth(), nlt.getHeight() );
		ip.setColor( Color.red );

		ip.setLineWidth( 5 );
		for ( int i = 0; i < inliers.size(); i++ ){
			for ( int j = 0; j < inliers.get(i).size(); j++ ){
				float[] tmp1 = inliers.get(i).get(j).getP1().getW();
				float[] tmp2 = inliers.get(i).get(j).getP2().getL();
				ip.setColor( Color.red );
				ip.drawDot( (int) tmp2[0], (int) tmp2[1] );
				ip.setColor( Color.blue );
				ip.drawDot( (int) tmp1[0], (int) tmp1[1] );
			}
		}
		ImagePlus points = new ImagePlus( "Corresponding Points after correction", ip );
		points.show();
	}
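	/*
	 * getTransform() below solves the least-squares normal equations for the affine
	 * that maps points1 onto points2 in homogeneous coordinates (third coordinate
	 * scaled to 100):
	 *
	 *   T = (S1^T * S1)^(-1) * S1^T * S2
	 *
	 * The result is then inverted and its entries are copied into every row of
	 * transformParams that is marked with -10.
	 */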
	public void getTransform( double[][] points1, double[][] points2, double[][] transformParams ){
		double[][] p1 = new double[points1.length][3];
		double[][] p2 = new double[points2.length][3];

		for ( int i = 0; i < points1.length; i++ ){
			p1[i][0] = points1[i][0];
			p1[i][1] = points1[i][1];
			p1[i][2] = 100.0;

			p2[i][0] = points2[i][0];
			p2[i][1] = points2[i][1];
			p2[i][2] = 100.0;
		}

		Matrix s1 = new Matrix( p1 );
		Matrix s2 = new Matrix( p2 );
		Matrix t = (s1.transpose().times(s1)).inverse().times(s1.transpose()).times(s2);
		t = t.inverse();
		for ( int i = 0; i < transformParams.length; i++ ){
			if ( transformParams[i][0] == -10 ){
				transformParams[i][0] = t.get(0,0);
				transformParams[i][1] = t.get(0,1);
				transformParams[i][2] = t.get(1,0);
				transformParams[i][3] = t.get(1,1);
				transformParams[i][4] = t.get(2,0);
				transformParams[i][5] = t.get(2,1);
			}
		}

		t.print( 1, 1 );
	}
	static List< Feature >[] extractSIFTFeaturesThreaded(
		final int numberOfImages, final String directory,
		final String[] names ){
		//extract all SIFT Features

		final List< Feature >[] siftFeatures = new ArrayList[numberOfImages];
		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 );

		IJ.showStatus( "Extracting SIFT Features" );
		for ( int ithread = 0; ithread < threads.length; ++ithread ) {
			threads[ithread] = new Thread() {
				public void run() {
					for ( int i = ai.getAndIncrement(); i < numberOfImages; i = ai.getAndIncrement() )
					{
						final ArrayList< Feature > fs = new ArrayList< Feature >();
						ImagePlus imps = new Opener().openImage( directory + names[i + sp.firstImageIndex] );
						imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToFloat() );

						FloatArray2DSIFT sift = new FloatArray2DSIFT( sp.sift.clone() );
						SIFT ijSIFT = new SIFT( sift );

						ijSIFT.extractFeatures( imps.getProcessor(), fs );

						Collections.sort( fs );
						IJ.log( "Extracting SIFT of image: " + i );

						siftFeatures[i] = fs;
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		return siftFeatures;
	}
	static protected void extractSIFTPointsThreaded(
		final int index,
		final List< Feature >[] siftFeatures,
		final List< PointMatch >[] inliers,
		final AbstractAffineModel2D< ? >[] models )
	{
		// save all matching candidates
		final List< PointMatch >[] candidates = new List[ siftFeatures.length - 1 ];

		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 );

		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );

					for ( int j = ai.getAndIncrement(); j < candidates.length; j = ai.getAndIncrement() )
					{
						int i = ( j < index ? j : j + 1 );
						candidates[ j ] = FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ i ], 1.5f, null, Float.MAX_VALUE, 0.5f );
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		// get rid of the outliers and save the rigid transformations to match
		// the inliers

		final AtomicInteger ai2 = new AtomicInteger( 0 );
		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );
					for ( int i = ai2.getAndIncrement(); i < candidates.length; i = ai2.getAndIncrement() )
					{
						List< PointMatch > tmpInliers = new ArrayList< PointMatch >();
						// RigidModel2D m =
						// RigidModel2D.estimateBestModel(candidates.get(i),
						// tmpInliers, sp.min_epsilon, sp.max_epsilon,
						// sp.min_inlier_ratio);

						final AbstractAffineModel2D< ? > m;
						switch ( sp.expectedModelIndex )
						{
						case 0:
							m = new TranslationModel2D();
							break;
						case 1:
							m = new RigidModel2D();
							break;
						case 2:
							m = new SimilarityModel2D();
							break;
						case 3:
							m = new AffineModel2D();
							break;
						default:
							return;
						}

						boolean modelFound = false;
						try
						{
							modelFound = m.filterRansac( candidates[ i ], tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10 );
						}
						catch ( NotEnoughDataPointsException e )
						{
							modelFound = false;
						}

						if ( modelFound )
							IJ.log( "Model found:\n " + candidates[ i ].size() + " candidates\n " + tmpInliers.size() + " inliers\n " + String.format( "%.2f", m.getCost() ) + "px average displacement" );
						else
							IJ.log( "No Model found." );

						inliers[ index * ( sp.numberOfImages - 1 ) + i ] = tmpInliers;
						models[ index * ( sp.numberOfImages - 1 ) + i ] = m;
						// System.out.println("**** MODEL ADDED: " +
						// (index*(sp.numberOfImages-1)+i));
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );
	}
}