Commit 0718a3eb authored by Johannes Pfeifer

dynare_minimize_objective: fix use of analytic derivatives by employing wrapper function

parent ff427807
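The change routes the analytic-derivative case of each optimizer through analytic_gradient_wrapper, so that solvers expecting the objective handle to return its derivative as a second output (fmincon/fminunc with 'GradObj' on, lsqnonlin with SpecifyObjectiveGradient) can use Dynare objective functions that return the gradient in a later output slot. As a minimal sketch of the idea only (the exact signatures of analytic_gradient_wrapper and of the wrapped objective functions are assumptions here, not taken from this commit), such a wrapper could look like:

function [fval, grad] = analytic_gradient_wrapper_sketch(x, fcn, varargin)
% Hypothetical sketch, not the actual Dynare implementation: re-expose an
% objective that returns its analytic gradient in a later output position
% as a two-output [fval, grad] function, the convention MATLAB optimizers
% use when the gradient option is switched on.
[fval, ~, ~, grad] = fcn(x, varargin{:});   % assumed output order: fval, info, exit_flag, grad
end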
@@ -77,24 +77,37 @@ switch minimizer_algorithm
         % is not able to even move away from the initial point.
         optim_options = optimoptions(optim_options, 'Algorithm','active-set');
     end
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
+    end
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
-    end
-    if ~isoctave
-        [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
-            fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1 %use wrapper
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        else
+            % Under Octave, use a wrapper, since fmincon() does not have an 11th
+            % arg. Also, only the first 4 output arguments are available.
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fmincon() does not have an 11th
-        % arg. Also, only the first 4 output arguments are available.
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag,output] = ...
-            fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
+                fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fmincon() does not have an 11th
+            % arg. Also, only the first 4 output arguments are available.
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     end
 case 2
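The rewritten fmincon branch relies on the standard MATLAB convention: once 'GradObj' is on, the solver calls the objective handle with two outputs, value and gradient. A standalone illustration of that calling convention (not part of this commit, and independent of Dynare's objective functions):

% Illustration only: a two-output objective (Rosenbrock value and its analytic gradient).
fg = @(x) deal(100*(x(2)-x(1)^2)^2 + (1-x(1))^2, ...
               [-400*x(1)*(x(2)-x(1)^2) - 2*(1-x(1)); 200*(x(2)-x(1)^2)]);
opts = optimoptions('fmincon','Algorithm','interior-point','GradObj','on','Display','off');
[xhat, fval] = fmincon(fg, [-1; 2], [], [], [], [], [-5; -5], [5; 5], [], opts);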
@@ -159,20 +172,28 @@ switch minimizer_algorithm
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on');
-    end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if ~isoctave
-        [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options = optimoptions(optim_options,'GradObj','on');
+        if ~isoctave
+            func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        else
+            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
+            func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     end
 case 4
     % Set default options.
     H0 = 1e-4*eye(n_params);
@@ -505,7 +526,12 @@ switch minimizer_algorithm
     if options_.silent_optimizer
         solveoptoptions.verbosity = 0;
     end
-    [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,fval]=solvopt(start_par_value,func,1,[],[],solveoptoptions);
+    else
+        [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    end
 case 102
     if isoctave
         error('Optimization algorithm 2 is not available under Octave')
@@ -534,15 +560,22 @@ switch minimizer_algorithm
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
-        optim_options = optimoptions(optim_options,'Display','off');
+        optim_options.Display='off';
     end
-    if ~isoctave
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options.SpecifyObjectiveGradient=true;
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = ...
+            lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
     else
-        % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
-        func = @(x)objective_function(x,varargin{:});
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
-    end
+        if ~isoctave
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
+            func = @(x)objective_function(x,varargin{:});
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
+        end
+    end
 otherwise
     if ischar(minimizer_algorithm)
         if exist(minimizer_algorithm)