/*******************************************************************************
 * This software is provided as a supplement to the authors' textbooks on digital
 * image processing published by Springer-Verlag in various languages and editions.
 * Permission to use and distribute this software is granted under the BSD 2-Clause
 * "Simplified" License (see http://opensource.org/licenses/BSD-2-Clause).
 * Copyright (c) 2006-2023 Wilhelm Burger, Mark J. Burge. All rights reserved.
 * Visit https://imagingbook.com for additional details.
 ******************************************************************************/
package imagingbook.common.math.nonlinear;

import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.fitting.leastsquares.GaussNewtonOptimizer;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresFactory;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem;
import org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.optim.SimpleVectorValueChecker;

import static org.apache.commons.math3.fitting.leastsquares.LeastSquaresFactory.model;

/**
 * <p>
 * This class defines static methods for simplified access to the nonlinear least-squares solvers in Apache Commons
 * Math, hiding most of the available configuration options. If settings other than the defaults are needed, the
 * original Apache Commons Math API should be used. See Appendix C of [1] for additional details and examples.
 * </p>
 * <p>
 * [1] W. Burger, M.J. Burge, <em>Digital Image Processing – An Algorithmic Introduction</em>, 3rd ed, Springer
 * (2022).
 * </p>
 *
 * @author WB
 */
public abstract class NonlinearLeastSquares {

    private NonlinearLeastSquares() {}

    /** Maximum number of function evaluations allowed by the optimizer. */
    public static int MaxEvaluations = 1000;
    /** Maximum number of iterations allowed by the optimizer. */
    public static int MaxIterations = 1000;
    /** Convergence tolerance (relative and absolute) used by the evaluation checker. */
    public static double Tolerance = 1e-6;

    /**
     * Solves the nonlinear least-squares problem defined by the arguments. Delegates to the Levenberg-Marquardt
     * method.
     *
     * @param V the "value" function, V(p) must return a vector for the current parameters p
     * @param J the "Jacobian" function, J(p) must return a matrix for the current parameters p
     * @param z the vector of observed ("target") values
     * @param p0 the initial parameter vector
     * @return the vector of optimal parameters
     */
    public static RealVector solveNLS(MultivariateVectorFunction V, MultivariateMatrixFunction J,
            RealVector z, RealVector p0) {
        return solveLevenbergMarquardt(V, J, z, p0);
    }

    /**
     * Solves the nonlinear least-squares problem defined by the arguments using Levenberg-Marquardt optimization.
     *
     * @param V the "value" function, V(p) must return a vector for the current parameters p
     * @param J the "Jacobian" function, J(p) must return a matrix for the current parameters p
     * @param z the vector of observed ("target") values
     * @param p0 the initial parameter vector
     * @return the vector of optimal parameters
     */
    public static RealVector solveLevenbergMarquardt(MultivariateVectorFunction V, MultivariateMatrixFunction J,
            RealVector z, RealVector p0) {
        LeastSquaresProblem problem = makeProblem(V, J, z, p0);
        LeastSquaresOptimizer optimizer = new LevenbergMarquardtOptimizer();
        Optimum solution = optimizer.optimize(problem);
        // System.out.println("iterations = " + solution.getIterations());
        return solution.getPoint();
    }

    /**
     * Solves the nonlinear least-squares problem defined by the arguments using Gauss-Newton optimization.
     *
     * @param V the "value" function, V(p) must return a vector for the current parameters p
     * @param J the "Jacobian" function, J(p) must return a matrix for the current parameters p
     * @param z the vector of observed ("target") values
     * @param p0 the initial parameter vector
     * @return the vector of optimal parameters
     */
    public static RealVector solveGaussNewton(MultivariateVectorFunction V, MultivariateMatrixFunction J,
            RealVector z, RealVector p0) {
        LeastSquaresProblem problem = makeProblem(V, J, z, p0);
        LeastSquaresOptimizer optimizer = new GaussNewtonOptimizer();
        Optimum solution = optimizer.optimize(problem);
        // System.out.println("iterations = " + solution.getIterations());
        return solution.getPoint();
    }

    // --------------------------------------------------------------------

    private static LeastSquaresProblem makeProblem(MultivariateVectorFunction V, MultivariateMatrixFunction J,
            RealVector observed, RealVector start) {
        return LeastSquaresFactory.create(model(V, J), observed, start,
                LeastSquaresFactory.evaluationChecker(new SimpleVectorValueChecker(Tolerance, Tolerance)),
                MaxEvaluations, MaxIterations);
    }

    // --------------------------------------------------------------------
    // book version
    // --------------------------------------------------------------------

    /**
     * Solves the nonlinear least-squares problem defined by the arguments using Levenberg-Marquardt optimization
     * (version listed in the book, without an explicit convergence checker).
     *
     * @param V the "value" function, V(p) must return a vector for the current parameters p
     * @param J the "Jacobian" function, J(p) must return a matrix for the current parameters p
     * @param z the vector of observed ("target") values
     * @param p0 the initial parameter vector
     * @return the vector of optimal parameters or null if the iteration limit was exceeded
     */
    public static RealVector solveNLS2(
            MultivariateVectorFunction V,
            MultivariateMatrixFunction J,
            RealVector z, RealVector p0)
    {
        LeastSquaresProblem problem =
                LeastSquaresFactory.create(model(V, J), z, p0, null,
                        MaxEvaluations, MaxIterations);
        LeastSquaresOptimizer optimizer = new LevenbergMarquardtOptimizer();
        Optimum solution = optimizer.optimize(problem);
        // System.out.println("iterations = " + solution.getIterations());
        if (solution.getIterations() > MaxIterations)
            return null;
        else
            return solution.getPoint();
    }

}
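/*
 * Usage sketch (illustrative, not part of the original class): fitting the two-parameter model
 * f(x; p) = p[0] * exp(p[1] * x) to sample data with solveNLS(). The sample positions, observed
 * values and the model itself are assumptions chosen only to show how V, J, z and p0 fit together;
 * ArrayRealVector (org.apache.commons.math3.linear.ArrayRealVector) is the standard Apache Commons
 * Math vector implementation and would need to be imported.
 *
 *     double[] x     = {0, 1, 2, 3, 4};                   // sample positions (made up)
 *     double[] zData = {2.0, 2.7, 3.6, 4.9, 6.7};         // observed values (made up)
 *
 *     MultivariateVectorFunction V = p -> {               // model values for parameters p
 *         double[] v = new double[x.length];
 *         for (int i = 0; i < x.length; i++)
 *             v[i] = p[0] * Math.exp(p[1] * x[i]);
 *         return v;
 *     };
 *
 *     MultivariateMatrixFunction J = p -> {               // Jacobian dV_i/dp_j for parameters p
 *         double[][] j = new double[x.length][2];
 *         for (int i = 0; i < x.length; i++) {
 *             j[i][0] = Math.exp(p[1] * x[i]);                // df/dp[0]
 *             j[i][1] = p[0] * x[i] * Math.exp(p[1] * x[i]);  // df/dp[1]
 *         }
 *         return j;
 *     };
 *
 *     RealVector z  = new ArrayRealVector(zData);
 *     RealVector p0 = new ArrayRealVector(new double[] {1.0, 0.1});  // initial guess
 *     RealVector p  = NonlinearLeastSquares.solveNLS(V, J, z, p0);   // optimal parameters
 */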