diff --git a/matlab/optimization/analytic_gradient_wrapper.m b/matlab/optimization/analytic_gradient_wrapper.m new file mode 100644 index 0000000000000000000000000000000000000000..8ef31deaff85f1815695bab04a9f6396d969a7a8 --- /dev/null +++ b/matlab/optimization/analytic_gradient_wrapper.m @@ -0,0 +1,37 @@ +function [fval, grad, hess, exit_flag]=analytic_gradient_wrapper(x, fcn, varargin) +%function [fval, grad, hess, exit_flag]=analytic_gradient_wrapper(x, fcn, varargin) +% Encapsulates an objective function to be minimized for use with Matlab +% optimizers +% +% INPUTS +% - x [double] n*1 vector of instrument values. +% - fcn [fhandle] objective function. +% - varargin [cell] additional parameters for fcn. +% +% OUTPUTS +% - fval [double] scalar, value of the objective function at x. +% - grad gradient of the objective function +% - hess Hessian of the objective function +% - exit_flag [integer] scalar, flag returned by fcn. 

% Copyright (C) 2021 Dynare Team
%
% This file is part of Dynare.
%
% Dynare is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% Dynare is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with Dynare. If not, see <http://www.gnu.org/licenses/>. 
+ +[fval, info, exit_flag, grad, hess] = fcn(x, varargin{:}); +if size(grad,2)==1 + grad=grad'; %should be row vector for Matlab; exception lsqnonlin where Jacobian is required +end \ No newline at end of file diff --git a/matlab/optimization/dynare_minimize_objective.m b/matlab/optimization/dynare_minimize_objective.m index 6e1a773d39c8d0a4572c5f054cc15a481b31abb1..1e6100512e0848c71c3270efba6795ae022a91da 100644 --- a/matlab/optimization/dynare_minimize_objective.m +++ b/matlab/optimization/dynare_minimize_objective.m @@ -77,24 +77,37 @@ switch minimizer_algorithm % is not able to even move away from the initial point. optim_options = optimoptions(optim_options, 'Algorithm','active-set'); end + if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1) + optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX + end if ~isempty(options_.optim_opt) eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']); end if options_.silent_optimizer optim_options = optimoptions(optim_options,'display','off'); end - if options_.analytic_derivation - optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX - end - if ~isoctave - [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ... - fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:}); + if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1) %use wrapper + func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:}); + if ~isoctave + [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ... + fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options); + else + % Under Octave, use a wrapper, since fmincon() does not have an 11th + % arg. Also, only the first 4 output arguments are available. + [opt_par_values,fval,exitflag,output] = ... 
+ fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options); + end else - % Under Octave, use a wrapper, since fmincon() does not have an 11th - % arg. Also, only the first 4 output arguments are available. - func = @(x) objective_function(x,varargin{:}); - [opt_par_values,fval,exitflag,output] = ... - fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options); + if ~isoctave + [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ... + fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:}); + else + % Under Octave, use a wrapper, since fmincon() does not have an 11th + % arg. Also, only the first 4 output arguments are available. + func = @(x) objective_function(x,varargin{:}); + [opt_par_values,fval,exitflag,output] = ... + fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options); + end end case 2 @@ -159,20 +172,28 @@ switch minimizer_algorithm if ~isempty(options_.optim_opt) eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']); end - if options_.analytic_derivation - optim_options = optimoptions(optim_options,'GradObj','on'); - end if options_.silent_optimizer optim_options = optimoptions(optim_options,'display','off'); end - if ~isoctave - [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:}); + if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1) + optim_options = optimoptions(optim_options,'GradObj','on'); + if ~isoctave + func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:}); + [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options); + else + % Under Octave, use a wrapper, since fminunc() does not have a 4th arg + func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:}); + [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options); + 
end else - % Under Octave, use a wrapper, since fminunc() does not have a 4th arg - func = @(x) objective_function(x,varargin{:}); - [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options); + if ~isoctave + [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:}); + else + % Under Octave, use a wrapper, since fminunc() does not have a 4th arg + func = @(x) objective_function(x,varargin{:}); + [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options); + end end - case 4 % Set default options. H0 = 1e-4*eye(n_params); @@ -500,7 +521,12 @@ switch minimizer_algorithm if options_.silent_optimizer solveoptoptions.verbosity = 0; end - [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:}); + if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1) + func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:}); + [opt_par_values,fval]=solvopt(start_par_value,func,1,[],[],solveoptoptions); + else + [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:}); + end case 102 if isoctave error('Optimization algorithm 2 is not available under Octave')