[trakem2.git] / lenscorrection / Distortion_Correction.java
/**

ImageJ plugin
Copyright (C) 2008 Verena Kaynig.

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation (http://www.gnu.org/licenses/gpl.txt )

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
**/
/* **************************************************************************** *
 * This ImageJ plugin estimates a non-linear lens distortion from the given
 * images and corrects all of them with the same transformation.
 *
 * TODO:
 *  - show histogram of beta coefficients to evaluate kernel dimension needed
 *  - show SIFT matches before and after correction?
 *  - give numerical xcorr results in a better manner
 *  - show visual impression of the stitching?
 *  - DOCUMENTATION
 *  - do mask images properly to incorporate into TrakEM
 *
 * Author: Verena Kaynig
 * Contact: verena.kaynig@inf.ethz.ch
 *
 * SIFT implementation: Stephan Saalfeld
 * **************************************************************************** */
package lenscorrection;

import ij.IJ;
import ij.gui.GenericDialog;
import ij.plugin.PlugIn;
import ij.process.ColorProcessor;
import ij.process.ImageProcessor;
import ij.ImagePlus;
import ij.io.DirectoryChooser;
import ij.io.Opener;
import ij.io.FileSaver;

import mpi.fruitfly.general.MultiThreading;
import mpicbg.models.*;
import mpicbg.ij.SIFT;
import mpicbg.imagefeatures.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.awt.Color;
import java.awt.geom.AffineTransform;
import java.io.*;

import org.jfree.chart.*;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.data.category.DefaultCategoryDataset;

import Jama.Matrix;
public class Distortion_Correction implements PlugIn{

	static public class PointMatchCollectionAndAffine
	{
		final public AffineTransform affine;
		final public Collection< PointMatch > pointMatches;

		public PointMatchCollectionAndAffine( final AffineTransform affine, final Collection< PointMatch > pointMatches )
		{
			this.affine = affine;
			this.pointMatches = pointMatches;
		}
	}
	static public class BasicParam
	{
		final public FloatArray2DSIFT.Param sift = new FloatArray2DSIFT.Param();

		/**
		 * Closest/next closest neighbour distance ratio
		 */
		public float rod = 0.92f;

		/**
		 * Maximal allowed alignment error in px
		 */
		public float maxEpsilon = 32.0f;

		/**
		 * Inlier/candidates ratio
		 */
		public float minInlierRatio = 0.2f;

		/**
		 * Implemented transformation models for choice
		 */
		final static public String[] modelStrings = new String[]{ "Translation", "Rigid", "Similarity", "Affine" };
		public int expectedModelIndex = 1;

		/**
		 * Order of the polynomial kernel
		 */
		public int dimension = 5;

		/**
		 * Regularization factor
		 */
		public double lambda = 0.01;

		public BasicParam()
		{
			sift.fdSize = 8;
			sift.maxOctaveSize = 1600;
			sift.minOctaveSize = 400;
		}

		public void addFields( final GenericDialog gd )
		{
			SIFT.addFields( gd, sift );

			gd.addNumericField( "closest/next_closest_ratio :", rod, 2 );

			gd.addMessage( "Geometric Consensus Filter :" );
			gd.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
			gd.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
			gd.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );

			gd.addMessage( "Lens Model :" );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
		}

		public boolean readFields( final GenericDialog gd )
		{
			SIFT.readFields( gd, sift );

			rod = ( float )gd.getNextNumber();

			maxEpsilon = ( float )gd.getNextNumber();
			minInlierRatio = ( float )gd.getNextNumber();
			expectedModelIndex = gd.getNextChoiceIndex();

			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();

			return !gd.invalidNumber();
		}

		public boolean setup( final String title )
		{
			final GenericDialog gd = new GenericDialog( title );
			addFields( gd );
			do
			{
				gd.showDialog();
				if ( gd.wasCanceled() ) return false;
			}
			while ( !readFields( gd ) );

			return true;
		}

		@Override
		public BasicParam clone()
		{
			final BasicParam p = new BasicParam();
			p.sift.set( sift );
			p.dimension = dimension;
			p.expectedModelIndex = expectedModelIndex;
			p.lambda = lambda;
			p.maxEpsilon = maxEpsilon;
			p.minInlierRatio = minInlierRatio;
			p.rod = rod;

			return p;
		}
	}
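	/**
	 * Parameters of the interactive plugin dialog. Extends {@link BasicParam}
	 * with the source and target directories, the number of calibration images
	 * to use, and flags controlling whether the correction is applied, the
	 * results are visualized, and the estimated transform is saved or loaded.
	 */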
	static public class PluginParam extends BasicParam
	{
		public String source_dir = "";
		public String target_dir = "";
		public String saveFileName = "distCorr.txt";

		public boolean applyCorrection = true;
		public boolean visualizeResults = true;

		public int numberOfImages = 9;
		public int firstImageIndex = 0;

		/**
		 * Original and calibrated images are supposed to have the same names,
		 * but are in different directories
		 */
		public String[] names;

		public int saveOrLoad = 0;

		@Override
		public void addFields( final GenericDialog gd )
		{
			super.addFields( gd );
		}

		@Override
		public boolean readFields( final GenericDialog gd )
		{
			super.readFields( gd );

			return !gd.invalidNumber();
		}

		/**
		 * Setup as a three step dialog.
		 */
		@Override
		public boolean setup( final String title )
		{
			source_dir = "";
			while ( source_dir == "" )
			{
				final DirectoryChooser dc = new DirectoryChooser( "Calibration Images" );
				source_dir = dc.getDirectory();
				if ( null == source_dir ) return false;

				source_dir = source_dir.replace( '\\', '/' );
				if ( !source_dir.endsWith( "/" ) ) source_dir += "/";
			}

			final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
			names = new File( source_dir ).list(
					new FilenameFilter()
					{
						public boolean accept( File dir, String name )
						{
							int idot = name.lastIndexOf( '.' );
							if ( -1 == idot ) return false;
							return exts.contains( name.substring( idot ).toLowerCase() );
						}
					} );
			Arrays.sort( names );

			final GenericDialog gd = new GenericDialog( title );

			gd.addNumericField( "number_of_images :", 9, 0 );
			gd.addChoice( "first_image :", names, names[ 0 ] );
			gd.addNumericField( "power_of_polynomial_kernel :", dimension, 0 );
			gd.addNumericField( "lambda :", lambda, 6 );
			gd.addCheckbox( "apply_correction_to_images", applyCorrection );
			gd.addCheckbox( "visualize results", visualizeResults );
			final String[] options = new String[]{ "save", "load" };
			gd.addChoice( "What to do? ", options, options[ saveOrLoad ] );
			gd.addStringField( "file_name: ", saveFileName );
			gd.showDialog();

			if ( gd.wasCanceled() ) return false;

			numberOfImages = ( int )gd.getNextNumber();
			firstImageIndex = gd.getNextChoiceIndex();
			dimension = ( int )gd.getNextNumber();
			lambda = gd.getNextNumber();
			applyCorrection = gd.getNextBoolean();
			visualizeResults = gd.getNextBoolean();
			saveOrLoad = gd.getNextChoiceIndex();
			saveFileName = gd.getNextString();

			if ( saveOrLoad == 0 || visualizeResults )
			{
				final GenericDialog gds = new GenericDialog( title );
				SIFT.addFields( gds, sift );

				gds.addNumericField( "closest/next_closest_ratio :", rod, 2 );

				gds.addMessage( "Geometric Consensus Filter:" );
				gds.addNumericField( "maximal_alignment_error :", maxEpsilon, 2, 6, "px" );
				gds.addNumericField( "inlier_ratio :", minInlierRatio, 2 );
				gds.addChoice( "expected_transformation :", modelStrings, modelStrings[ expectedModelIndex ] );

				gds.showDialog();
				if ( gds.wasCanceled() ) return false;

				SIFT.readFields( gds, sift );

				rod = ( float )gds.getNextNumber();

				maxEpsilon = ( float )gds.getNextNumber();
				minInlierRatio = ( float )gds.getNextNumber();
				expectedModelIndex = gds.getNextChoiceIndex();

				return !( gd.invalidNumber() || gds.invalidNumber() );
			}

			return !gd.invalidNumber();
		}
	}
	static final public BasicParam p = new BasicParam();
	static final public PluginParam sp = new PluginParam();

	NonLinearTransform nlt = new NonLinearTransform();
	AbstractAffineModel2D< ? >[] models;
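	/**
	 * Plugin entry point: collects the parameters, extracts SIFT features from
	 * the calibration images, estimates pairwise correspondences and affine
	 * models, then fits (or loads) the non-linear distortion model and
	 * optionally corrects the images and evaluates the result.
	 */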
	public void run( String arg )
	{
		if ( !sp.setup( "Lens Correction" ) ) return;

		IJ.log( sp.source_dir + sp.names[ 0 ] );
		final ImagePlus imgTmp = new Opener().openImage( sp.source_dir + sp.names[ 0 ] );
		final int imageWidth = imgTmp.getWidth(), imageHeight = imgTmp.getHeight();
		// imgTmp was just needed to get width and height of the images
		imgTmp.flush();

		List< List< PointMatch > > inliers = null;
		List< Feature >[] siftFeatures = extractSIFTFeaturesThreaded( sp.numberOfImages, sp.source_dir, sp.names );

		List< PointMatch >[] inliersTmp = new ArrayList[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];
		models = new AbstractAffineModel2D[ sp.numberOfImages * ( sp.numberOfImages - 1 ) ];

		IJ.showStatus( "Estimating Correspondences" );
		for ( int i = 0; i < sp.numberOfImages; ++i )
		{
			IJ.log( "Estimating correspondences of image " + i );
			IJ.showProgress( ( i + 1 ), sp.numberOfImages );
			extractSIFTPointsThreaded( i, siftFeatures, inliersTmp, models );
		}

		int wholeCount = 0;
		inliers = new ArrayList< List< PointMatch > >();
		for ( int i = 0; i < inliersTmp.length; i++ )
		{
			// if the list at inliersTmp[ i ] contains only one null element,
			// its size is still 1
			if ( inliersTmp[ i ].size() > 10 )
			{
				wholeCount += inliersTmp[ i ].size();
			}

			// add every list, otherwise the model indices no longer correspond
			// to the inlier positions in the list
			inliers.add( inliersTmp[ i ] );
		}

		// this is really really ugly
		double[][] tp = new double[ wholeCount ][ 6 ];
		double h1[][] = new double[ wholeCount ][ 2 ];
		double h2[][] = new double[ wholeCount ][ 2 ];
		int count = 0;
		for ( int i = 0; i < inliers.size(); ++i )
		{
			if ( inliers.get( i ).size() > 10 )
			{
				double[][] points1 = new double[ inliers.get( i ).size() ][ 2 ];
				double[][] points2 = new double[ inliers.get( i ).size() ][ 2 ];

				for ( int j = 0; j < inliers.get( i ).size(); j++ )
				{
					float[] tmp1 = ( ( PointMatch )inliers.get( i ).get( j ) ).getP1().getL();
					float[] tmp2 = ( ( PointMatch )inliers.get( i ).get( j ) ).getP2().getL();

					points1[ j ][ 0 ] = ( double )tmp1[ 0 ];
					points1[ j ][ 1 ] = ( double )tmp1[ 1 ];
					points2[ j ][ 0 ] = ( double )tmp2[ 0 ];
					points2[ j ][ 1 ] = ( double )tmp2[ 1 ];

					h1[ count ] = new double[]{ ( double )tmp1[ 0 ], ( double )tmp1[ 1 ] };
					h2[ count ] = new double[]{ ( double )tmp2[ 0 ], ( double )tmp2[ 1 ] };

					models[ i ].createAffine().getMatrix( tp[ count ] );
					count++;
				}
			}
		}

		if ( sp.saveOrLoad == 0 )
		{
			// after all preprocessing is done, estimate the distortion correction transform
			nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
			nlt.visualizeSmall( sp.lambda );

			while ( true )
			{
				final GenericDialog gdl = new GenericDialog( "New lambda?" );
				gdl.addMessage( "If the distortion field shows a clear translation, \n it is likely that you need to increase lambda." );
				gdl.addNumericField( "lambda :", sp.lambda, 6 );
				gdl.showDialog();
				if ( gdl.wasCanceled() ) break;
				sp.lambda = gdl.getNextNumber();
				nlt = distortionCorrection( h1, h2, tp, sp.dimension, sp.lambda, imageWidth, imageHeight );
				nlt.visualizeSmall( sp.lambda );
			}
			nlt.save( sp.source_dir + sp.saveFileName );
		}

		if ( sp.saveOrLoad == 1 )
		{
			nlt.load( sp.source_dir + sp.saveFileName );
			nlt.print();
		}

		if ( sp.applyCorrection || sp.visualizeResults )
		{
			sp.target_dir = correctImages();
		}

		if ( sp.visualizeResults )
		{
			IJ.log( "call nlt.visualize()" );
			nlt.visualize();
			IJ.log( "call evaluateCorrection(inliers)" );
			evaluateCorrection( inliers );
		}

		//System.out.println("FINISHED");
	}
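	/**
	 * Evaluates the correction by comparing the cross correlation of
	 * overlapping image pairs before and after correction, both on gray values
	 * and on gradient magnitudes. Shows bar charts of the results and writes
	 * the raw numbers to "xcorrData.log" in the source directory.
	 */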
	public void evaluateCorrection( List< List< PointMatch > > inliers )
	{
		IJ.showStatus( "Evaluating Distortion Correction" );
		double[][] original = new double[ sp.numberOfImages ][ 2 ];
		double[][] corrected = new double[ sp.numberOfImages ][ 2 ];

		for ( int i = sp.firstImageIndex; i < sp.numberOfImages; i++ )
		{
			original[ i ] = evaluateCorrectionXcorr( i, sp.source_dir );
			corrected[ i ] = evaluateCorrectionXcorr( i, sp.target_dir );
		}
		DefaultCategoryDataset dataset = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGain = new DefaultCategoryDataset();
		DefaultCategoryDataset datasetGrad = new DefaultCategoryDataset();

		for ( int i = 0; i < original.length; ++i )
		{
			dataset.setValue( Math.abs( original[ i ][ 0 ] ), "before", "image" + i );
			dataset.setValue( Math.abs( corrected[ i ][ 0 ] ), "after", "image" + i );
			datasetGrad.setValue( Math.abs( original[ i ][ 1 ] ), "before", "image" + i );
			datasetGrad.setValue( Math.abs( corrected[ i ][ 1 ] ), "after", "image" + i );

			datasetGain.setValue( Math.abs( corrected[ i ][ 0 ] ) - Math.abs( original[ i ][ 0 ] ), "gray", "image" + i );
			datasetGain.setValue( Math.abs( corrected[ i ][ 1 ] ) - Math.abs( original[ i ][ 1 ] ), "grad", "image" + i );
		}

		final JFreeChart chart = ChartFactory.createBarChart(
				"Xcorr before and after correction",
				"ImageNumber",
				"Xcorr",
				dataset,
				PlotOrientation.VERTICAL,
				false,
				true, false );
		final ImagePlus imp = new ImagePlus( "Plot", chart.createBufferedImage( 500, 300 ) );
		imp.show();

		final JFreeChart chartGrad = ChartFactory.createBarChart(
				"XcorrGradient before and after correction",
				"ImageNumber",
				"Xcorr",
				datasetGrad,
				PlotOrientation.VERTICAL,
				false,
				true, false );
		final ImagePlus impGrad = new ImagePlus( "Plot", chartGrad.createBufferedImage( 500, 300 ) );
		impGrad.show();

		final JFreeChart chartGain = ChartFactory.createBarChart(
				"Gain in Xcorr",
				"ImageNumber",
				"Xcorr",
				datasetGain,
				PlotOrientation.VERTICAL,
				false,
				true, false );
		final ImagePlus impGain = new ImagePlus( "Plot", chartGain.createBufferedImage( 500, 300 ) );
		impGain.show();

		visualizePoints( inliers );

		// write xcorr data to file
		String original0 = "", original1 = "", corrected0 = "", corrected1 = "", gain0 = "", gain1 = "";
		for ( int i = 0; i < original.length; ++i )
		{
			original0 = original0 + Double.toString( original[ i ][ 0 ] ) + "; ";
			original1 = original1 + Double.toString( original[ i ][ 1 ] ) + "; ";
			corrected0 = corrected0 + Double.toString( corrected[ i ][ 0 ] ) + "; ";
			corrected1 = corrected1 + Double.toString( corrected[ i ][ 1 ] ) + "; ";
			gain0 = gain0 + Double.toString( Math.abs( corrected[ i ][ 0 ] ) - Math.abs( original[ i ][ 0 ] ) ) + "; ";
			gain1 = gain1 + Double.toString( Math.abs( corrected[ i ][ 1 ] ) - Math.abs( original[ i ][ 1 ] ) ) + "; ";
		}

		try
		{
			BufferedWriter out = new BufferedWriter( new FileWriter( sp.source_dir + "xcorrData.log" ) );

			out.write( original0 );
			out.newLine();
			out.newLine();
			out.write( original1 );
			out.newLine();
			out.newLine();
			out.write( corrected0 );
			out.newLine();
			out.newLine();
			out.write( corrected1 );
			out.newLine();
			out.newLine();
			out.write( gain0 );
			out.newLine();
			out.newLine();
			out.write( gain1 );
			out.newLine();
			out.close();
		}
		catch ( Exception e ) { System.err.println( "Error: " + e.getMessage() ); }
	}
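	/**
	 * Single-threaded variant of {@link #extractSIFTPointsThreaded}: matches
	 * the features of image {@code index} against all other images and keeps
	 * the RANSAC inliers together with the estimated model of each pair.
	 */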
	protected void extractSIFTPoints(
			int index,
			List< Feature >[] siftFeatures,
			List< List< PointMatch > > inliers,
			List< AbstractAffineModel2D< ? > > models )
	{
		// save all matching candidates
		List< List< PointMatch > > candidates = new ArrayList< List< PointMatch > >();

		for ( int j = 0; j < siftFeatures.length; j++ )
		{
			if ( index == j ) continue;
			candidates.add( FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ j ], 1.5f, null, Float.MAX_VALUE, 0.5f ) );
		}

		// get rid of the outliers and save the transformations to match the inliers
		for ( int i = 0; i < candidates.size(); ++i )
		{
			List< PointMatch > tmpInliers = new ArrayList< PointMatch >();

			final AbstractAffineModel2D< ? > m;
			switch ( sp.expectedModelIndex )
			{
			case 0:
				m = new TranslationModel2D();
				break;
			case 1:
				m = new RigidModel2D();
				break;
			case 2:
				m = new SimilarityModel2D();
				break;
			case 3:
				m = new AffineModel2D();
				break;
			default:
				return;
			}

			try
			{
				m.filterRansac(
						candidates.get( i ),
						tmpInliers,
						1000,
						sp.maxEpsilon,
						sp.minInlierRatio,
						10 );
			}
			catch ( NotEnoughDataPointsException e ) { e.printStackTrace(); }

			inliers.add( tmpInliers );
			models.add( m );
		}
	}
	/**
	 * Estimates a polynomial distortion model for a set of
	 * {@linkplain PointMatch corresponding points} and returns the inverse of
	 * this model which then may be used to undo the distortion.
	 *
	 * @param pointMatches
	 *            point match collections, each paired with the affine that
	 *            transfers the match collection into a common image frame
	 * @param dimension the order of the polynomial model
	 * @param lambda regularization factor
	 * @param imageWidth
	 * @param imageHeight
	 *
	 * @return a polynomial distortion model to undo the distortion
	 */
	static public NonLinearTransform createInverseDistortionModel(
			final Collection< PointMatchCollectionAndAffine > pointMatches,
			final int dimension,
			final double lambda,
			final int imageWidth,
			final int imageHeight )
	{
		int wholeCount = 0;
		for ( final PointMatchCollectionAndAffine pma : pointMatches )
			if ( pma.pointMatches.size() > 10 )
				wholeCount += pma.pointMatches.size();

		final double[][] tp = new double[ wholeCount ][ 6 ];
		final double h1[][] = new double[ wholeCount ][ 2 ];
		final double h2[][] = new double[ wholeCount ][ 2 ];
		int count = 0;
		for ( final PointMatchCollectionAndAffine pma : pointMatches )
		{
			if ( pma.pointMatches.size() > 10 )
			{
				final double[][] points1 = new double[ pma.pointMatches.size() ][ 2 ];
				final double[][] points2 = new double[ pma.pointMatches.size() ][ 2 ];

				int i = 0;
				for ( final PointMatch match : pma.pointMatches )
				{
					final float[] tmp1 = match.getP1().getL();
					final float[] tmp2 = match.getP2().getL();

					points1[ i ][ 0 ] = ( double )tmp1[ 0 ];
					points1[ i ][ 1 ] = ( double )tmp1[ 1 ];
					points2[ i ][ 0 ] = ( double )tmp2[ 0 ];
					points2[ i ][ 1 ] = ( double )tmp2[ 1 ];

					h1[ count ] = new double[]{ ( double )tmp1[ 0 ], ( double )tmp1[ 1 ] };
					h2[ count ] = new double[]{ ( double )tmp2[ 0 ], ( double )tmp2[ 1 ] };

					pma.affine.getMatrix( tp[ count ] );
					count++;

					++i;
				}
			}
		}

		NonLinearTransform nlt = distortionCorrection( h1, h2, tp, dimension, lambda, imageWidth, imageHeight );
		nlt.inverseTransform( h1 );

		return nlt;
	}
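	/**
	 * Estimates the distortion field from corresponding points (hack1, hack2)
	 * and the pairwise affine parameters (transformParams). The coordinates
	 * are expanded with the polynomial kernel of the given dimension and the
	 * normal equations S1 * beta = S2 are accumulated over all
	 * correspondences; lambda weights an additional term that keeps the
	 * corrected coordinates close to the original ones, and a small ridge
	 * (0.001 * identity) keeps the system well conditioned. The resulting
	 * beta coefficients parameterize the returned {@link NonLinearTransform}.
	 */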
	static protected NonLinearTransform distortionCorrection( double hack1[][], double hack2[][], double transformParams[][], int dimension, double lambda, int w, int h )
	{
		IJ.showStatus( "Getting the Distortion Field" );
		NonLinearTransform nlt = new NonLinearTransform( dimension, w, h );

		double expandedX[][] = nlt.kernelExpandMatrixNormalize( hack1 );
		double expandedY[][] = nlt.kernelExpandMatrix( hack2 );

		int s = expandedX[ 0 ].length;
		Matrix S1 = new Matrix( 2 * s, 2 * s );
		Matrix S2 = new Matrix( 2 * s, 1 );

		for ( int i = 0; i < expandedX.length; i++ )
		{
			Matrix xk_ij = new Matrix( expandedX[ i ], 1 );
			Matrix xk_ji = new Matrix( expandedY[ i ], 1 );

			Matrix yk1a = xk_ij.minus( xk_ji.times( transformParams[ i ][ 0 ] ) );
			Matrix yk1b = xk_ij.times( 0.0 ).minus( xk_ji.times( -transformParams[ i ][ 2 ] ) );
			Matrix yk2a = xk_ij.times( 0.0 ).minus( xk_ji.times( -transformParams[ i ][ 1 ] ) );
			Matrix yk2b = xk_ij.minus( xk_ji.times( transformParams[ i ][ 3 ] ) );

			Matrix y = new Matrix( 2, 2 * s );
			y.setMatrix( 0, 0, 0, s - 1, yk1a );
			y.setMatrix( 0, 0, s, 2 * s - 1, yk1b );
			y.setMatrix( 1, 1, 0, s - 1, yk2a );
			y.setMatrix( 1, 1, s, 2 * s - 1, yk2b );

			Matrix xk = new Matrix( 2, 2 * expandedX[ 0 ].length );
			xk.setMatrix( 0, 0, 0, s - 1, xk_ij );
			xk.setMatrix( 1, 1, s, 2 * s - 1, xk_ij );

			double[] vals = { hack1[ i ][ 0 ], hack1[ i ][ 1 ] };
			Matrix c = new Matrix( vals, 2 );

			Matrix X = xk.transpose().times( xk ).times( lambda );
			Matrix Y = y.transpose().times( y );

			S1 = S1.plus( Y.plus( X ) );

			double trans1 = ( transformParams[ i ][ 2 ] * transformParams[ i ][ 5 ] - transformParams[ i ][ 0 ] * transformParams[ i ][ 4 ] );
			double trans2 = ( transformParams[ i ][ 1 ] * transformParams[ i ][ 4 ] - transformParams[ i ][ 3 ] * transformParams[ i ][ 5 ] );
			double[] trans = { trans1, trans2 };

			Matrix translation = new Matrix( trans, 2 );
			Matrix YT = y.transpose().times( translation );
			Matrix XC = xk.transpose().times( c ).times( lambda );

			S2 = S2.plus( YT.plus( XC ) );
		}

		Matrix regularize = Matrix.identity( S1.getRowDimension(), S1.getColumnDimension() );
		Matrix beta = new Matrix( S1.plus( regularize.times( 0.001 ) ).inverse().times( S2 ).getColumnPackedCopy(), s );

		nlt.setBeta( beta.getArray() );
		nlt.inverseTransform( hack1 );
		return nlt;
	}
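	/**
	 * Applies the estimated transform to the images. If the correction is not
	 * to be kept, the corrected copies are written to a temporary directory
	 * "distCorr_tmp" that is deleted on exit; otherwise the user chooses a
	 * target directory. Images already present in the target directory are
	 * reused for evaluation instead of being overwritten. Returns the target
	 * directory.
	 */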
	protected String correctImages()
	{
		if ( !sp.applyCorrection )
		{
			sp.target_dir = System.getProperty( "user.dir" ).replace( '\\', '/' ) + "/distCorr_tmp/";
			System.out.println( "Tmp target directory: " + sp.target_dir );

			if ( new File( sp.target_dir ).exists() )
			{
				System.out.println( "removing old tmp directory!" );

				final String[] filesToDelete = new File( sp.target_dir ).list();
				for ( int i = 0; i < filesToDelete.length; i++ )
				{
					System.out.println( filesToDelete[ i ] );
					boolean deleted = new File( sp.target_dir + filesToDelete[ i ] ).delete();
					if ( !deleted ) IJ.log( "Error: Could not remove temporary directory!" );
				}
				new File( sp.target_dir ).delete();
			}
			try
			{
				// create one directory
				boolean success = ( new File( sp.target_dir ) ).mkdir();
				if ( success ) new File( sp.target_dir ).deleteOnExit();
			}
			catch ( Exception e )
			{
				IJ.showMessage( "Error! Could not create temporary directory. " + e.getMessage() );
			}
		}
		if ( sp.target_dir == "" )
		{
			final DirectoryChooser dc = new DirectoryChooser( "Target Directory" );
			sp.target_dir = dc.getDirectory();
			if ( null == sp.target_dir ) return "";
			sp.target_dir = sp.target_dir.replace( '\\', '/' );
			if ( !sp.target_dir.endsWith( "/" ) ) sp.target_dir += "/";
		}

		// filter for image files, mirroring the extension filter used for the source directory
		final String exts = ".tif.jpg.png.gif.tiff.jpeg.bmp.pgm";
		final String[] namesTarget = new File( sp.target_dir ).list( new FilenameFilter()
		{
			public boolean accept( File dir, String name )
			{
				int idot = name.lastIndexOf( '.' );
				if ( -1 == idot ) return false;
				return exts.contains( name.substring( idot ).toLowerCase() );
			}
		} );

		if ( namesTarget.length > 0 ) IJ.showMessage( "Overwrite Message", "There are already images in that directory. These will be used for evaluation." );
		else
		{
			IJ.showStatus( "Correcting Images" );

			final Thread[] threads = MultiThreading.newThreads();
			final AtomicInteger ai = new AtomicInteger( sp.applyCorrection ? 0 : sp.firstImageIndex );

			for ( int ithread = 0; ithread < threads.length; ++ithread )
			{
				threads[ ithread ] = new Thread()
				{
					public void run()
					{
						setPriority( Thread.NORM_PRIORITY );

						for ( int i = ai.getAndIncrement(); i < ( sp.applyCorrection ? sp.names.length : ( sp.firstImageIndex + sp.numberOfImages ) ); i = ai.getAndIncrement() )
						{
							IJ.log( "Correcting image " + sp.names[ i ] );
							final ImagePlus imps = new Opener().openImage( sp.source_dir + sp.names[ i ] );
							imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToShort( false ) );
							ImageProcessor[] transErg = nlt.transform( imps.getProcessor() );
							imps.setProcessor( imps.getTitle(), transErg[ 0 ] );
							if ( !sp.applyCorrection ) new File( sp.target_dir + sp.names[ i ] ).deleteOnExit();
							new FileSaver( imps ).saveAsTiff( sp.target_dir + sp.names[ i ] );
						}
					}
				};
			}
			MultiThreading.startAndJoin( threads );
		}
		return sp.target_dir;
	}
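	/**
	 * Cross correlates image {@code index} with every other calibration image
	 * after mapping the partner through the corresponding affine model, and
	 * returns the mean gray value and gradient cross correlation (medians are
	 * computed as well but currently unused).
	 */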
	protected double[] evaluateCorrectionXcorr( int index, String directory )
	{
		ImagePlus im1 = new Opener().openImage( directory + sp.names[ index ] );
		im1.setProcessor( sp.names[ index ], im1.getProcessor().convertToShort( false ) );

		int count = 0;
		ArrayList< Double > xcorrVals = new ArrayList< Double >();
		ArrayList< Double > xcorrValsGrad = new ArrayList< Double >();

		for ( int i = 0; i < sp.numberOfImages; i++ )
		{
			if ( i == index )
			{
				continue;
			}
			if ( models[ index * ( sp.numberOfImages - 1 ) + count ] == null )
			{
				count++;
				continue;
			}

			ImagePlus newImg = new Opener().openImage( directory + sp.names[ i + sp.firstImageIndex ] );
			newImg.setProcessor( newImg.getTitle(), newImg.getProcessor().convertToShort( false ) );

			newImg.setProcessor( sp.names[ i + sp.firstImageIndex ], applyTransformToImageInverse( models[ index * ( sp.numberOfImages - 1 ) + count ], newImg.getProcessor() ) );
			ImageProcessor testIp = im1.getProcessor().duplicate();

			// If you want to see the stitching improvement, run this:
			// for ( int x = 0; x < testIp.getWidth(); x++ ){
			// 	for ( int y = 0; y < testIp.getHeight(); y++ ){
			// 		testIp.set( x, y, Math.abs( im1.getProcessor().get( x, y ) -
			// 			newImg.getProcessor().get( x, y ) ) );
			// 	}
			// }
			// ImagePlus testImg = new ImagePlus( sp.names[ index ] + " minus " +
			// 	sp.names[ i ], testIp );
			// testImg.show();
			// im1.show();
			// newImg.show();

			xcorrVals.add( getXcorrBlackOut( im1.getProcessor(), newImg.getProcessor() ) );

			xcorrValsGrad.add( getXcorrBlackOutGradient( im1.getProcessor(), newImg.getProcessor() ) );
			count++;
		}

		Collections.sort( xcorrVals );
		Collections.sort( xcorrValsGrad );

		double[] medians = { xcorrVals.get( xcorrVals.size() / 2 ), xcorrValsGrad.get( xcorrValsGrad.size() / 2 ) };

		double m1 = 0.0, m2 = 0.0;
		for ( int i = 0; i < xcorrVals.size(); i++ )
		{
			m1 += xcorrVals.get( i );
			m2 += xcorrValsGrad.get( i );
		}

		m1 /= xcorrVals.size();
		m2 /= xcorrVals.size();

		double[] means = { m1, m2 };

		return means;
		//return medians;
	}
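	/**
	 * Warps {@code ip} with the inverse of the given affine model by pushing
	 * every source pixel to its (truncated) target position in a blanked
	 * copy; target pixels that receive no source pixel remain black.
	 */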
	ImageProcessor applyTransformToImageInverse(
			AbstractAffineModel2D< ? > a, ImageProcessor ip )
	{
		ImageProcessor newIp = ip.duplicate();
		newIp.max( 0.0 );

		for ( int x = 0; x < ip.getWidth(); x++ )
		{
			for ( int y = 0; y < ip.getHeight(); y++ )
			{
				float[] position = { ( float )x, ( float )y };
				// float[] newPosition = a.apply( position );
				float[] newPosition = { 0, 0 };
				try
				{
					newPosition = a.applyInverse( position );
				}
				catch ( NoninvertibleModelException e ) {}

				int xn = ( int )newPosition[ 0 ];
				int yn = ( int )newPosition[ 1 ];

				if ( ( xn >= 0 ) && ( yn >= 0 ) && ( xn < ip.getWidth() ) && ( yn < ip.getHeight() ) )
					newIp.set( xn, yn, ip.get( x, y ) );
			}
		}
		return newIp;
	}
	double getXcorrBlackOutGradient( ImageProcessor ip1, ImageProcessor ip2 )
	{
		ImageProcessor ip1g = getGradientSobel( ip1 );
		ImageProcessor ip2g = getGradientSobel( ip2 );

		return getXcorrBlackOut( ip1g, ip2g );
	}
	// This blends out gradients that include black pixels, so that the sharp border
	// caused by the nonlinear transformation does not disturb the gradient comparison.
	// FIXME: this should be handled by a mask image!
	ImageProcessor getGradientSobel( ImageProcessor ip )
	{
		ImageProcessor ipGrad = ip.duplicate();
		ipGrad.max( 0.0 );

		for ( int i = 1; i < ipGrad.getWidth() - 1; i++ )
		{
			for ( int j = 1; j < ipGrad.getHeight() - 1; j++ )
			{
				if ( ip.get( i - 1, j - 1 ) == 0 || ip.get( i - 1, j ) == 0 || ip.get( i - 1, j + 1 ) == 0 ||
						ip.get( i, j - 1 ) == 0 || ip.get( i, j ) == 0 || ip.get( i, j + 1 ) == 0 ||
						ip.get( i + 1, j - 1 ) == 0 || ip.get( i + 1, j ) == 0 || ip.get( i + 1, j + 1 ) == 0 )
					continue;

				double gradX = ( double ) -ip.get( i - 1, j - 1 ) - 2 * ip.get( i - 1, j ) - ip.get( i - 1, j + 1 )
						+ ip.get( i + 1, j - 1 ) + 2 * ip.get( i + 1, j ) + ip.get( i + 1, j + 1 );

				double gradY = ( double ) -ip.get( i - 1, j - 1 ) - 2 * ip.get( i, j - 1 ) - ip.get( i + 1, j - 1 )
						+ ip.get( i - 1, j + 1 ) + 2 * ip.get( i, j + 1 ) + ip.get( i + 1, j + 1 );

				double mag = Math.sqrt( gradX * gradX + gradY * gradY );
				ipGrad.setf( i, j, ( float )mag );
			}
		}
		return ipGrad;
	}
	// Tested the result against a matlab routine; this worked fine.
	static double getXcorrBlackOut( ImageProcessor ip1, ImageProcessor ip2 )
	{
		ip1 = ip1.convertToFloat();
		ip2 = ip2.convertToFloat();

		// If this is not done, the black area from the transformed image influences the xcorr result.
		// A better alternative would be to use mask images and only calculate the xcorr of
		// the region present in both images.
		for ( int i = 0; i < ip1.getWidth(); i++ )
		{
			for ( int j = 0; j < ip1.getHeight(); j++ )
			{
				if ( ip1.get( i, j ) == 0 )
					ip2.set( i, j, 0 );
				if ( ip2.get( i, j ) == 0 )
					ip1.set( i, j, 0 );
			}
		}

		// FloatProcessor ip1f = (FloatProcessor)ip1.convertToFloat();
		// FloatProcessor ip2f = (FloatProcessor)ip2.convertToFloat();

		float[] data1 = ( float[] )ip1.getPixels();
		float[] data2 = ( float[] )ip2.getPixels();

		double[] data1b = new double[ data1.length ];
		double[] data2b = new double[ data2.length ];

		int count = 0;
		double mean1 = 0.0, mean2 = 0.0;

		for ( int i = 0; i < data1.length; i++ )
		{
			// if ( ( data1[ i ] == 0 ) || ( data2[ i ] == 0 ) )
			//	continue;
			data1b[ i ] = data1[ i ];
			data2b[ i ] = data2[ i ];
			mean1 += data1b[ i ];
			mean2 += data2b[ i ];
			count++;
		}

		mean1 /= ( double )count;
		mean2 /= ( double )count;

		double L2_1 = 0.0, L2_2 = 0.0;
		for ( int i = 0; i < count; i++ )
		{
			L2_1 += ( data1b[ i ] - mean1 ) * ( data1b[ i ] - mean1 );
			L2_2 += ( data2b[ i ] - mean2 ) * ( data2b[ i ] - mean2 );
		}

		L2_1 = Math.sqrt( L2_1 );
		L2_2 = Math.sqrt( L2_2 );

		double xcorr = 0.0;
		for ( int i = 0; i < count; i++ )
		{
			xcorr += ( ( data1b[ i ] - mean1 ) / L2_1 ) * ( ( data2b[ i ] - mean2 ) / L2_2 );
		}

		// System.out.println( "XcorrVal: " + xcorr );
		return xcorr;
	}
	void visualizePoints( List< List< PointMatch > > inliers )
	{
		ColorProcessor ip = new ColorProcessor( nlt.getWidth(), nlt.getHeight() );
		ip.setColor( Color.red );

		ip.setLineWidth( 5 );
		for ( int i = 0; i < inliers.size(); i++ )
		{
			for ( int j = 0; j < inliers.get( i ).size(); j++ )
			{
				float[] tmp1 = inliers.get( i ).get( j ).getP1().getW();
				float[] tmp2 = inliers.get( i ).get( j ).getP2().getL();
				ip.setColor( Color.red );
				ip.drawDot( ( int )tmp2[ 0 ], ( int )tmp2[ 1 ] );
				ip.setColor( Color.blue );
				ip.drawDot( ( int )tmp1[ 0 ], ( int )tmp1[ 1 ] );
			}
		}
		ImagePlus points = new ImagePlus( "", ip );
		points.show();
	}
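	/**
	 * Least squares estimate of the affine transform mapping {@code points1}
	 * onto {@code points2} in homogeneous coordinates (third coordinate fixed
	 * to 100). The inverted transform is written into every row of
	 * {@code transformParams} that is flagged with -10 in its first entry.
	 */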
	public void getTransform( double[][] points1, double[][] points2, double[][] transformParams )
	{
		double[][] p1 = new double[ points1.length ][ 3 ];
		double[][] p2 = new double[ points2.length ][ 3 ];

		for ( int i = 0; i < points1.length; i++ )
		{
			p1[ i ][ 0 ] = points1[ i ][ 0 ];
			p1[ i ][ 1 ] = points1[ i ][ 1 ];
			p1[ i ][ 2 ] = 100.0;

			p2[ i ][ 0 ] = points2[ i ][ 0 ];
			p2[ i ][ 1 ] = points2[ i ][ 1 ];
			p2[ i ][ 2 ] = 100.0;
		}

		Matrix s1 = new Matrix( p1 );
		Matrix s2 = new Matrix( p2 );
		Matrix t = ( s1.transpose().times( s1 ) ).inverse().times( s1.transpose() ).times( s2 );
		t = t.inverse();
		for ( int i = 0; i < transformParams.length; i++ )
		{
			if ( transformParams[ i ][ 0 ] == -10 )
			{
				transformParams[ i ][ 0 ] = t.get( 0, 0 );
				transformParams[ i ][ 1 ] = t.get( 0, 1 );
				transformParams[ i ][ 2 ] = t.get( 1, 0 );
				transformParams[ i ][ 3 ] = t.get( 1, 1 );
				transformParams[ i ][ 4 ] = t.get( 2, 0 );
				transformParams[ i ][ 5 ] = t.get( 2, 1 );
			}
		}

		t.print( 1, 1 );
	}
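	/**
	 * Extracts the SIFT features of all calibration images in parallel and
	 * returns them, sorted per image.
	 */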
	static List< Feature >[] extractSIFTFeaturesThreaded(
			final int numberOfImages, final String directory,
			final String[] names )
	{
		// extract all SIFT features
		final List< Feature >[] siftFeatures = new ArrayList[ numberOfImages ];
		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 );

		IJ.showStatus( "Extracting SIFT Features" );
		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				public void run()
				{
					for ( int i = ai.getAndIncrement(); i < numberOfImages; i = ai.getAndIncrement() )
					{
						final ArrayList< Feature > fs = new ArrayList< Feature >();
						ImagePlus imps = new Opener().openImage( directory + names[ i + sp.firstImageIndex ] );
						imps.setProcessor( imps.getTitle(), imps.getProcessor().convertToFloat() );

						FloatArray2DSIFT sift = new FloatArray2DSIFT( sp.sift.clone() );
						SIFT ijSIFT = new SIFT( sift );

						ijSIFT.extractFeatures( imps.getProcessor(), fs );

						Collections.sort( fs );
						IJ.log( "Extracting SIFT of image: " + i );

						siftFeatures[ i ] = fs;
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		return siftFeatures;
	}
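	/**
	 * Matches the features of image {@code index} against all other images in
	 * parallel, filters the candidates with RANSAC, and stores the inliers and
	 * the estimated model for each partner image at a fixed offset
	 * ({@code index * (numberOfImages - 1) + i}) in the given arrays.
	 */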
	static protected void extractSIFTPointsThreaded(
			final int index,
			final List< Feature >[] siftFeatures,
			final List< PointMatch >[] inliers,
			final AbstractAffineModel2D< ? >[] models )
	{
		// save all matching candidates
		final List< PointMatch >[] candidates = new List[ siftFeatures.length - 1 ];

		final Thread[] threads = MultiThreading.newThreads();
		final AtomicInteger ai = new AtomicInteger( 0 );

		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );

					for ( int j = ai.getAndIncrement(); j < candidates.length; j = ai.getAndIncrement() )
					{
						int i = ( j < index ? j : j + 1 );
						candidates[ j ] = FloatArray2DSIFT.createMatches( siftFeatures[ index ], siftFeatures[ i ], 1.5f, null, Float.MAX_VALUE, 0.5f );
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );

		// get rid of the outliers and save the rigid transformations to match
		// the inliers

		final AtomicInteger ai2 = new AtomicInteger( 0 );
		for ( int ithread = 0; ithread < threads.length; ++ithread )
		{
			threads[ ithread ] = new Thread()
			{
				public void run()
				{
					setPriority( Thread.NORM_PRIORITY );
					for ( int i = ai2.getAndIncrement(); i < candidates.length; i = ai2.getAndIncrement() )
					{
						List< PointMatch > tmpInliers = new ArrayList< PointMatch >();
						// RigidModel2D m =
						// RigidModel2D.estimateBestModel( candidates.get( i ),
						// tmpInliers, sp.min_epsilon, sp.max_epsilon,
						// sp.min_inlier_ratio );

						final AbstractAffineModel2D< ? > m;
						switch ( sp.expectedModelIndex )
						{
						case 0:
							m = new TranslationModel2D();
							break;
						case 1:
							m = new RigidModel2D();
							break;
						case 2:
							m = new SimilarityModel2D();
							break;
						case 3:
							m = new AffineModel2D();
							break;
						default:
							return;
						}

						boolean modelFound = false;
						try
						{
							modelFound = m.filterRansac( candidates[ i ], tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10 );
						}
						catch ( NotEnoughDataPointsException e )
						{
							modelFound = false;
						}

						if ( modelFound )
							IJ.log( "Model found:\n  " + candidates[ i ].size() + " candidates\n  " + tmpInliers.size() + " inliers\n  " + String.format( "%.2f", m.getCost() ) + "px average displacement" );
						else
							IJ.log( "No Model found." );

						inliers[ index * ( sp.numberOfImages - 1 ) + i ] = tmpInliers;
						models[ index * ( sp.numberOfImages - 1 ) + i ] = m;
						// System.out.println( "**** MODEL ADDED: " +
						// ( index * ( sp.numberOfImages - 1 ) + i ) );
					}
				}
			};
		}
		MultiThreading.startAndJoin( threads );
	}
}