From f6a8473144cac33a84e62734d9bf6b80693d84d3 Mon Sep 17 00:00:00 2001
From: Johannes Pfeifer <jpfeifer@gmx.de>
Date: Thu, 14 Sep 2023 15:40:13 +0200
Subject: [PATCH] estimation: support additional_optimizer_steps

Closes https://git.dynare.org/Dynare/dynare/-/issues/1573
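
The additional optimizers are run sequentially after mode_compute, each
warm-starting from the mode returned by the previous step. Sketch of the
intended usage (datafile name is illustrative only):

    estimation(mode_compute=4, additional_optimizer_steps=[5],
               datafile=mydata, nobs=100);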
---
 doc/manual/source/the-model-file.rst |   7 ++
 matlab/default_option_values.m       |   1 +
 matlab/dynare_estimation_1.m         | 126 ++++++++++++++-------------
 tests/estimation/fs2000.mod          |   3 +-
 4 files changed, 77 insertions(+), 60 deletions(-)

diff --git a/doc/manual/source/the-model-file.rst b/doc/manual/source/the-model-file.rst
index fb70d98ae7..7944b37e17 100644
--- a/doc/manual/source/the-model-file.rst
+++ b/doc/manual/source/the-model-file.rst
@@ -6701,6 +6701,13 @@ observed variables.
 
        |br| Default value is ``4``.
 
+    .. option:: additional_optimizer_steps = [INTEGER]
+                additional_optimizer_steps = [INTEGER1:INTEGER2]
+                additional_optimizer_steps = [INTEGER1 INTEGER2 ...]
+
+        Vector of additional minimization algorithms run sequentially
+        after ``mode_compute``, each warm-started at the mode returned by
+        the previous step. Default: no additional optimization steps.
+
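+        *Example*
+
+        A minimal invocation, refining the mode found by the default
+        ``mode_compute=4`` with a subsequent run of ``newrat``
+        (``mode_compute=5``)::
+
+            estimation(mode_compute=4, additional_optimizer_steps=[5], ...);
+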
     .. option:: silent_optimizer
 
        Instructs Dynare to run mode computing/optimization silently
diff --git a/matlab/default_option_values.m b/matlab/default_option_values.m
index 712fd6ef18..2e8a7033c4 100644
--- a/matlab/default_option_values.m
+++ b/matlab/default_option_values.m
@@ -444,6 +444,7 @@ options_.prior_restrictions.status = 0;
 options_.prior_restrictions.routine = [];
 
 options_.mode_compute = 4;
+options_.additional_optimizer_steps = [];
 options_.mode_file = '';
 options_.moments_varendo = false;
 options_.nk = 1;
diff --git a/matlab/dynare_estimation_1.m b/matlab/dynare_estimation_1.m
index 29aa7842de..f4349f224f 100644
--- a/matlab/dynare_estimation_1.m
+++ b/matlab/dynare_estimation_1.m
@@ -222,72 +222,80 @@ end
 
 %% Estimation of the posterior mode or likelihood mode
 
-if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation
 
-    [xparam1, fval, exitflag, hh, options_, Scale, new_rat_hess_info] = dynare_minimize_objective(objective_function,xparam1,options_.mode_compute,options_,[bounds.lb bounds.ub],bayestopt_.name,bayestopt_,hh,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
-    fprintf('\nFinal value of minus the log posterior (or likelihood):%f \n', fval);
 
-    if isnumeric(options_.mode_compute) && options_.mode_compute==5
-        newratflag = new_rat_hess_info.newratflag;
-        new_rat_hess_info = new_rat_hess_info.new_rat_hess_info;
-    elseif isnumeric(options_.mode_compute) && options_.mode_compute==6 %save scaling factor
-        save([M_.dname filesep 'Output' filesep M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
-        options_.mh_jscale = Scale;
-        bayestopt_.jscale(:) = options_.mh_jscale;
-    end
-    if ~isnumeric(options_.mode_compute) || ~isequal(options_.mode_compute,6) %always already computes covariance matrix
-        if options_.cova_compute == 1 %user did not request covariance not to be computed
-            if options_.analytic_derivation && strcmp(func2str(objective_function),'dsge_likelihood')
-                ana_deriv_old = options_.analytic_derivation;
-                options_.analytic_derivation = 2;
-                [~,~,~,~,hh] = feval(objective_function,xparam1, ...
-                                     dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
-                options_.analytic_derivation = ana_deriv_old;
-            elseif ~isnumeric(options_.mode_compute) || ~(isequal(options_.mode_compute,5) && newratflag~=1 && strcmp(func2str(objective_function),'dsge_likelihood'))
-                % enter here if i) not mode_compute_5, ii) if mode_compute_5 and newratflag==1; 
-                % with flag==0 or 2 and dsge_likelihood, we force to use
-                % the hessian from outer product gradient of optimizer 5 below
-                if options_.hessian.use_penalized_objective
-                    penalized_objective_function = str2func('penalty_objective_function');
-                    hh = hessian(penalized_objective_function, xparam1, options_.gstep, objective_function, fval, dataset_, dataset_info, options_, M_, estim_params_, bayestopt_, bounds,oo_);
-                else
-                    hh = hessian(objective_function, xparam1, options_.gstep, dataset_, dataset_info, options_, M_, estim_params_, bayestopt_, bounds,oo_);
-                end
-                hh = reshape(hh, nx, nx);
-            elseif isnumeric(options_.mode_compute) && isequal(options_.mode_compute,5)
-                % other numerical hessian options available with optimizer
-                % 5 and dsge_likelihood
-                %
-                % if newratflag == 0
-                % compute outer product gradient of optimizer 5
-                %
-                % if newratflag == 2
-                % compute 'mixed' outer product gradient of optimizer 5
-                % with diagonal elements computed with numerical second order derivatives
-                %
-                % uses univariate filters, so to get max # of available
-                % densities for outer product gradient
-                kalman_algo0 = options_.kalman_algo;
-                compute_hessian = 1;
-                if ~((options_.kalman_algo == 2) || (options_.kalman_algo == 4))
-                    options_.kalman_algo=2;
-                    if options_.lik_init == 3
-                        options_.kalman_algo=4;
+if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation
+    optimizer_vec = [options_.mode_compute; num2cell(options_.additional_optimizer_steps(:))];
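+    % options_.mode_compute may be a numeric optimizer ID or a function handle;
+    % concatenating it with the cell array from num2cell wraps it in a cell, so
+    % each entry of optimizer_vec is run in turn, warm-starting from the
+    % xparam1 returned by the previous step.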
+    for optim_iter = 1:length(optimizer_vec)
+        current_optimizer = optimizer_vec{optim_iter};
+
+        [xparam1, fval, exitflag, hh, options_, Scale, new_rat_hess_info] = dynare_minimize_objective(objective_function,xparam1,current_optimizer,options_,[bounds.lb bounds.ub],bayestopt_.name,bayestopt_,hh,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
+        fprintf('\nFinal value of minus the log posterior (or likelihood): %f \n', fval);
+
+        if isnumeric(current_optimizer)
+            if current_optimizer==5
+                newratflag = new_rat_hess_info.newratflag;
+                new_rat_hess_info = new_rat_hess_info.new_rat_hess_info;
+            elseif current_optimizer==6 %save scaling factor
+                save([M_.dname filesep 'Output' filesep M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
+                options_.mh_jscale = Scale;
+                bayestopt_.jscale(:) = options_.mh_jscale;
+            end
+        end
+        if ~isnumeric(current_optimizer) || ~isequal(current_optimizer,6) %optimizer 6 already computes the covariance matrix itself
+            if options_.cova_compute == 1 %user did not disable computation of the covariance matrix
+                if options_.analytic_derivation && strcmp(func2str(objective_function),'dsge_likelihood')
+                    ana_deriv_old = options_.analytic_derivation;
+                    options_.analytic_derivation = 2;
+                    [~,~,~,~,hh] = feval(objective_function,xparam1, ...
+                        dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
+                    options_.analytic_derivation = ana_deriv_old;
+                elseif ~isnumeric(current_optimizer) || ~(isequal(current_optimizer,5) && newratflag~=1 && strcmp(func2str(objective_function),'dsge_likelihood'))
+                    % enter here if i) not mode_compute=5, or ii) mode_compute=5 with newratflag==1;
+                    % with newratflag==0 or 2 and dsge_likelihood, we force using the
+                    % Hessian from the outer product of gradients of optimizer 5 below
+                    if options_.hessian.use_penalized_objective
+                        penalized_objective_function = str2func('penalty_objective_function');
+                        hh = hessian(penalized_objective_function, xparam1, options_.gstep, objective_function, fval, dataset_, dataset_info, options_, M_, estim_params_, bayestopt_, bounds,oo_);
+                    else
+                        hh = hessian(objective_function, xparam1, options_.gstep, dataset_, dataset_info, options_, M_, estim_params_, bayestopt_, bounds,oo_);
                     end
-                elseif newratflag==0 % hh already contains outer product gradient with univariate filter
-                    compute_hessian = 0;
-                end
-                if compute_hessian
-                    crit = options_.newrat.tolerance.f;
-                    newratflag = newratflag>0;
-                    hh = reshape(mr_hessian(xparam1,objective_function,fval,newratflag,crit,new_rat_hess_info,[bounds.lb bounds.ub],bayestopt_.p2,0,dataset_, dataset_info, options_,M_,estim_params_,bayestopt_,bounds,oo_), nx, nx);
+                    hh = reshape(hh, nx, nx);
+                elseif isnumeric(current_optimizer) && isequal(current_optimizer,5)
+                    % other numerical hessian options available with optimizer
+                    % 5 and dsge_likelihood
+                    %
+                    % if newratflag == 0
+                    % compute outer product gradient of optimizer 5
+                    %
+                    % if newratflag == 2
+                    % compute 'mixed' outer product gradient of optimizer 5
+                    % with diagonal elements computed with numerical second order derivatives
+                    %
+                    % use univariate filters to get the maximum number of available
+                    % densities for the outer product gradient
+                    kalman_algo0 = options_.kalman_algo;
+                    compute_hessian = 1;
+                    if ~((options_.kalman_algo == 2) || (options_.kalman_algo == 4))
+                        options_.kalman_algo=2;
+                        if options_.lik_init == 3
+                            options_.kalman_algo=4;
+                        end
+                    elseif newratflag==0 % hh already contains outer product gradient with univariate filter
+                        compute_hessian = 0;
+                    end
+                    if compute_hessian
+                        crit = options_.newrat.tolerance.f;
+                        newratflag = newratflag>0;
+                        hh = reshape(mr_hessian(xparam1,objective_function,fval,newratflag,crit,new_rat_hess_info,[bounds.lb bounds.ub],bayestopt_.p2,0,dataset_, dataset_info, options_,M_,estim_params_,bayestopt_,bounds,oo_), nx, nx);
+                    end
+                    options_.kalman_algo = kalman_algo0;
                 end
-                options_.kalman_algo = kalman_algo0;
             end
         end
+        parameter_names = bayestopt_.name;
     end
-    parameter_names = bayestopt_.name;
-    if options_.cova_compute || options_.mode_compute==5 || options_.mode_compute==6
+    if options_.cova_compute || (isnumeric(current_optimizer) && (current_optimizer==5 || current_optimizer==6))
         save([M_.dname filesep 'Output' filesep M_.fname '_mode.mat'],'xparam1','hh','parameter_names','fval');
     else
         save([M_.dname filesep 'Output' filesep M_.fname '_mode.mat'],'xparam1','parameter_names','fval');
diff --git a/tests/estimation/fs2000.mod b/tests/estimation/fs2000.mod
index 82ea58e95b..97843d888b 100644
--- a/tests/estimation/fs2000.mod
+++ b/tests/estimation/fs2000.mod
@@ -87,7 +87,8 @@ estimation(order=1,datafile=fsdat_simul,nobs=192,loglinear,mh_replic=3000,mh_nbl
         taper_steps = [4 7 15],
         raftery_lewis_diagnostics,
         raftery_lewis_qrs=[0.025 0.01 0.95],
-        bayesian_irf,posterior_nograph
+        bayesian_irf,posterior_nograph,
+        additional_optimizer_steps=[4]
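+        //exercise the new option: rerun optimizer 4 after mode_compute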
         ) y m;
 
 if ~isequal(options_.convergence.geweke.taper_steps,[4 7 15]') || ~isequal(options_.convergence.geweke.geweke_interval,[0.19 0.49])
-- 
GitLab