Skip to content
Snippets Groups Projects
Commit 3da4d77f authored by Marco Ratto's avatar Marco Ratto
Browse files

fix to #1935. Forced options_.analytic_derivation=-1; must be set before...

fix to #1935: forcing options_.analytic_derivation=-1 must happen before entering dynare_minimize_objective
parent 91ff4ef7
Branches
No related tags found
No related merge requests found
......@@ -237,6 +237,14 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation &
optimizer_vec = [options_.mode_compute;num2cell(options_.additional_optimizer_steps)];
for optim_iter = 1:length(optimizer_vec)
current_optimizer = optimizer_vec{optim_iter};
if isnumeric(current_optimizer)
if current_optimizer==5
if options_.analytic_derivation
old_analytic_derivation = options_.analytic_derivation;
options_.analytic_derivation=-1; %force analytic outer product gradient hessian for each iteration
end
end
end
[xparam1, fval, ~, hh, options_, Scale, new_rat_hess_info] = dynare_minimize_objective(objective_function,xparam1,current_optimizer,options_,[bounds.lb bounds.ub],bayestopt_.name,bayestopt_,hh,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_.dr, oo_.steady_state,oo_.exo_steady_state,oo_.exo_det_steady_state);
fprintf('\nFinal value of minus the log posterior (or likelihood):%f \n', fval);
......@@ -244,6 +252,9 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation &
if current_optimizer==5
newratflag = new_rat_hess_info.newratflag;
new_rat_hess_info = new_rat_hess_info.new_rat_hess_info;
if options_.analytic_derivation
options_.analytic_derivation = old_analytic_derivation;
end
elseif current_optimizer==6 %save scaling factor
save([M_.dname filesep 'Output' filesep M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
options_.mh_jscale = Scale;
......
......@@ -277,8 +277,6 @@ switch minimizer_algorithm
prior_information.p2=NaN(n_params,1);
end
if options_.analytic_derivation
old_analytic_derivation = options_.analytic_derivation;
options_.analytic_derivation=-1; %force analytic outer product gradient hessian for each iteration
analytic_grad=1;
crit = options_.newrat.tolerance.f_analytic;
newratflag = 0; %analytical Hessian
......@@ -340,9 +338,6 @@ switch minimizer_algorithm
[opt_par_values,hessian_mat,~,fval,~,new_rat_hess_info] = newrat(objective_function,start_par_value,bounds,analytic_grad,crit,nit,0,Verbose,Save_files,hess_info,prior_information.p2,options_.gradient_epsilon,parameter_names,varargin{:}); %hessian_mat is the plain outer product gradient Hessian
new_rat_hess_info.new_rat_hess_info = new_rat_hess_info;
new_rat_hess_info.newratflag = newratflag;
if options_.analytic_derivation
options_.analytic_derivation = old_analytic_derivation;
end
case 6
if isempty(prior_information) %Inf will be reset
prior_information.p2=Inf(n_params,1);
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment