From 0718a3eb2584462d3a7a4a8405c167f979ff77bf Mon Sep 17 00:00:00 2001
From: Johannes Pfeifer <jpfeifer@gmx.de>
Date: Fri, 22 Jan 2021 20:09:32 +0100
Subject: [PATCH] dynare_minimize_objective: fix use of analytic derivatives by
 employing a wrapper function

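With 'GradObj'/'SpecifyObjectiveGradient' enabled, fmincon, fminunc and
lsqnonlin expect the objective to return its gradient (or Jacobian) as the
second output argument, which Dynare's objective functions do not provide
in that position. The calls therefore now go through
analytic_gradient_wrapper, which closes over the extra arguments in
varargin and returns the analytic derivatives where the optimizers expect
them. The same wrapper is also used for solvopt and whenever
mom.analytic_jacobian is set for method-of-moments estimation.

For reference, a minimal sketch of the wrapper (the output positions of the
gradient and Hessian returned by the objective are an assumption here and
may differ from the committed analytic_gradient_wrapper.m):

    function [fval, grad, hess, exitflag] = analytic_gradient_wrapper(x, fcn, varargin)
    % Evaluate the Dynare objective fcn at x with its extra arguments and
    % return the analytic gradient (and Hessian, if computed) in the output
    % slots that fmincon/fminunc/lsqnonlin use when gradients are enabled.
    % NOTE: the output order assumed for fcn below is illustrative only.
    [fval, ~, exitflag, grad, hess] = fcn(x, varargin{:});
    end

The optimizers are then called with a handle that closes over varargin,
e.g. func = @(x) analytic_gradient_wrapper(x, objective_function, varargin{:}).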
---
 .../optimization/dynare_minimize_objective.m  | 89 +++++++++++++------
 1 file changed, 61 insertions(+), 28 deletions(-)

diff --git a/matlab/optimization/dynare_minimize_objective.m b/matlab/optimization/dynare_minimize_objective.m
index 235bc43fe2..8025d5732a 100644
--- a/matlab/optimization/dynare_minimize_objective.m
+++ b/matlab/optimization/dynare_minimize_objective.m
@@ -77,24 +77,37 @@ switch minimizer_algorithm
         % is not able to even move away from the initial point.
         optim_options = optimoptions(optim_options, 'Algorithm','active-set');
     end
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
+    end
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
-    end
-    if ~isoctave
-        [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
-            fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1 %use wrapper
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lambda,grad,hessian_mat] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        else
+            % Under Octave, only the first 4 output arguments of fmincon()
+            % are available; the gradient wrapper is used in both cases anyway.
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fmincon() does not have an 11th
-        % arg. Also, only the first 4 output arguments are available.
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag,output] = ...
-            fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lambda,grad,hessian_mat] = ...
+                fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fmincon() does not have an 11th
+            % arg. Also, only the first 4 output arguments are available.
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     end
     
   case 2
@@ -159,20 +172,28 @@ switch minimizer_algorithm
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on');
-    end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if ~isoctave
-        [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options = optimoptions(optim_options,'GradObj','on');
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        if ~isoctave
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        else
+            % Under Octave, the call is identical: the wrapper already absorbs
+            % varargin, so fminunc() needs no extra (4th) argument.
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     end
-    
   case 4
     % Set default options.
     H0 = 1e-4*eye(n_params);
@@ -505,7 +526,12 @@ switch minimizer_algorithm
     if options_.silent_optimizer
         solveoptoptions.verbosity = 0;
     end
-    [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,fval]=solvopt(start_par_value,func,1,[],[],solveoptoptions);
+    else
+        [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    end
   case 102
     if isoctave
         error('Optimization algorithm 2 is not available under Octave')
@@ -534,15 +560,22 @@ switch minimizer_algorithm
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
-        optim_options = optimoptions(optim_options,'Display','off');
+        optim_options.Display='off';
     end
-    if ~isoctave
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+    if options_.analytic_derivation || options_.mom.analytic_jacobian==1
+        optim_options.SpecifyObjectiveGradient=true;
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = ...
+            lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
     else
-        % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
-        func = @(x)objective_function(x,varargin{:});
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
-    end
+        if ~isoctave
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
+            func = @(x)objective_function(x,varargin{:});
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
+        end
+    end
   otherwise
     if ischar(minimizer_algorithm)
         if exist(minimizer_algorithm)
-- 
GitLab