From 3da4d77fc375c49b4b12146f9d2433114fcbdee8 Mon Sep 17 00:00:00 2001
From: Marco Ratto <marco.ratto@ec.europa.eu>
Date: Mon, 27 Jan 2025 17:51:11 +0100
Subject: [PATCH] fix to #1935. Forced options_.analytic_derivation=-1; must be
 set before entering dynare_minimize_objective

---
 matlab/estimation/dynare_estimation_1.m         | 11 +++++++++++
 matlab/optimization/dynare_minimize_objective.m |  5 -----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/matlab/estimation/dynare_estimation_1.m b/matlab/estimation/dynare_estimation_1.m
index 12c0444b5a..e82e4389dd 100644
--- a/matlab/estimation/dynare_estimation_1.m
+++ b/matlab/estimation/dynare_estimation_1.m
@@ -237,6 +237,14 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation &
     optimizer_vec = [options_.mode_compute;num2cell(options_.additional_optimizer_steps)];
     for optim_iter = 1:length(optimizer_vec)
         current_optimizer = optimizer_vec{optim_iter};
+        if isnumeric(current_optimizer)
+            if current_optimizer==5
+                if options_.analytic_derivation
+                    old_analytic_derivation = options_.analytic_derivation;
+                    options_.analytic_derivation=-1; %force analytic outer product gradient hessian for each iteration
+                end
+            end
+        end
         [xparam1, fval, ~, hh, options_, Scale, new_rat_hess_info] = dynare_minimize_objective(objective_function,xparam1,current_optimizer,options_,[bounds.lb bounds.ub],bayestopt_.name,bayestopt_,hh,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_.dr, oo_.steady_state,oo_.exo_steady_state,oo_.exo_det_steady_state);
 
         fprintf('\nFinal value of minus the log posterior (or likelihood):%f \n', fval);
@@ -244,6 +252,9 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation &
         if current_optimizer==5
             newratflag = new_rat_hess_info.newratflag;
             new_rat_hess_info = new_rat_hess_info.new_rat_hess_info;
+            if options_.analytic_derivation
+                options_.analytic_derivation = old_analytic_derivation;
+            end
         elseif current_optimizer==6 %save scaling factor
             save([M_.dname filesep 'Output' filesep M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
             options_.mh_jscale = Scale;
diff --git a/matlab/optimization/dynare_minimize_objective.m b/matlab/optimization/dynare_minimize_objective.m
index 0452ca2955..a1cc385e3b 100644
--- a/matlab/optimization/dynare_minimize_objective.m
+++ b/matlab/optimization/dynare_minimize_objective.m
@@ -277,8 +277,6 @@ switch minimizer_algorithm
            prior_information.p2=NaN(n_params,1);
        end
        if options_.analytic_derivation
-           old_analytic_derivation = options_.analytic_derivation;
-           options_.analytic_derivation=-1; %force analytic outer product gradient hessian for each iteration
            analytic_grad=1;
            crit = options_.newrat.tolerance.f_analytic;
            newratflag = 0; %analytical Hessian
@@ -340,9 +338,6 @@ switch minimizer_algorithm
        [opt_par_values,hessian_mat,~,fval,~,new_rat_hess_info] = newrat(objective_function,start_par_value,bounds,analytic_grad,crit,nit,0,Verbose,Save_files,hess_info,prior_information.p2,options_.gradient_epsilon,parameter_names,varargin{:}); %hessian_mat is the plain outer product gradient Hessian
        new_rat_hess_info.new_rat_hess_info = new_rat_hess_info;
        new_rat_hess_info.newratflag = newratflag;
-       if options_.analytic_derivation
-           options_.analytic_derivation = old_analytic_derivation;
-       end
    case 6
        if isempty(prior_information) %Inf will be reset
            prior_information.p2=Inf(n_params,1);
-- 
GitLab