diff --git a/matlab/optimization/dynare_minimize_objective.m b/matlab/optimization/dynare_minimize_objective.m
index be0ba6237d95c6b5aa240fee6bfe37508606acdc..e611bed46d67935b37019e185de9ff52d9d6c930 100644
--- a/matlab/optimization/dynare_minimize_objective.m
+++ b/matlab/optimization/dynare_minimize_objective.m
@@ -289,6 +289,7 @@ switch minimizer_algorithm
     end
     nit=options_.newrat.maxiter;
     epsilon = options_.gradient_epsilon;
+    robust = false;
     Verbose = options_.newrat.verbosity;
     Save_files = options_.newrat.Save_files;
     if ~isempty(options_.optim_opt)
@@ -306,6 +307,8 @@ switch minimizer_algorithm
                 end
               case 'NumgradEpsilon'
                 epsilon = options_list{i,2};
+              case 'robust'
+                robust = options_list{i,2};
               case 'TolFun'
                 crit = options_list{i,2};
               case 'verbosity'
@@ -324,6 +327,7 @@ switch minimizer_algorithm
     hess_info.gstep=options_.gstep;
     hess_info.htol = 1.e-4;
     hess_info.h1=epsilon*ones(n_params,1);
+    hess_info.robust=robust;
     % here we force 7th input argument (flagg) to be 0, since outer product
     % gradient Hessian is handled in dynare_estimation_1
     [opt_par_values,hessian_mat,gg,fval,invhess,new_rat_hess_info] = newrat(objective_function,start_par_value,bounds,analytic_grad,crit,nit,0,Verbose,Save_files,hess_info,prior_information.p2,options_.gradient_epsilon,parameter_names,varargin{:});    %hessian_mat is the plain outer product gradient Hessian
diff --git a/matlab/optimization/mr_gstep.m b/matlab/optimization/mr_gstep.m
index 911fb49b696e4b68d68f10c2bcf8db2659529768..02ff7ac5299c739726a0532a7e55ff471d7ebe7d 100644
--- a/matlab/optimization/mr_gstep.m
+++ b/matlab/optimization/mr_gstep.m
@@ -1,4 +1,4 @@
-function [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,varargin)
+function [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,robust,varargin)
-% [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,varargin)
+% [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,robust,varargin)
 %
 % Gibbs type step in optimisation
@@ -73,15 +73,87 @@ while i<n
         gg(i)=(f1(i)'-f_1(i)')./(2.*h1(i));
         hh(i) = 1/max(1.e-9,abs( (f1(i)+f_1(i)-2*f0)./(h1(i)*h1(i)) ));
         if gg(i)*(hh(i)*gg(i))/2 > htol(i)
-            [f0, x, ~, ~] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
+            [ff, xx,fcount,retcode] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
+            if retcode && robust
+                % step failed: probe 7 points around x(i) along coordinate i and fit a quadratic
+                if abs(x(i))<1.e-6
+                    xa=transpose(linspace(x(i)/2, sign(x(i))*1.e-6*3/2, 7));
+                else
+                    xa=transpose(linspace(x(i)/2, x(i)*3/2, 7));
+                end
+                for k=1:7
+                    xh1(i)=xa(k);
+                    fa(k,1) = penalty_objective_function(xh1,func0,penalty,varargin{:});
+                end
+                b=[ones(7,1) xa xa.*xa./2]\fa;
+                gg(i)=x(i)*b(3)+b(2);
+                hh(i)=1/b(3);
+                [ff2, xx2, fcount2, retcode2] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
+                if ff2<ff
+                    ff=ff2;
+                    xx=xx2;
+                end
+                if min(fa)<ff
+                    [ff, im]=min(fa);
+                    xx(i)=xa(im);
+                end
+            end
             ig(i)=1;
+            if robust
+                if not(isequal(xx , check_bounds(xx,bounds)))
+                    xx = check_bounds(xx,bounds);
+                    if xx(i)<x(i)
+                        % lower bound
+                        xx(i) = min(xx(i)+h1(i), 0.5*(xx(i)+x(i)));
+                    else
+                        % upper bound
+                        xx(i) = max(xx(i)-h1(i), 0.5*(xx(i)+x(i)));
+                    end
+                    [ff,exit_flag]=penalty_objective_function(xx,func0,penalty,varargin{:});
+                    if exit_flag~=1
+                        disp('last step exited with bad status!')
+                    elseif ff<f0
+                        f0=ff;
+                        x=xx;
+                    end
+                else
+                    % check improvement wrt predicted one
+                    if abs(f0-ff) < abs(gg(i)*(hh(i)*gg(i))/2/100) || abs(x(i)-xx(i))<1.e-10
+                        [ff1, xx1, fcount, retcode] = csminit1(func0,x,penalty,f0,-gg,0,diag(hh),Verbose,varargin{:});
+                        if not(isequal(xx1 , check_bounds(xx1,bounds)))
+                            xx1 = check_bounds(xx1,bounds);
+                            if xx1(i)<x(i)
+                                % lower bound
+                                xx1(i) = min(xx1(i)+h1(i), 0.5*(xx1(i)+x(i)));
+                            else
+                                % upper bound
+                                xx1(i) = max(xx1(i)-h1(i), 0.5*(xx1(i)+x(i)));
+                            end
+                            [ff1,exit_flag]=penalty_objective_function(xx1,func0,penalty,varargin{:});
+                            if exit_flag~=1
+                                disp('last step exited with bad status!')
+                            end
+                        end
+                        if ff1<ff
+                            ff=ff1;
+                            xx=xx1;
+                        end
+                    end
+                    f0=ff;
+                    x=xx;
+                end
+            else
+                f0=ff;
+                x=xx;
+                x = check_bounds(x,bounds);
+            end
             if Verbose
-                fprintf(['Done for param %s = %8.4f\n'],parameter_names{i},x(i))
+                fprintf(['Done for param %s = %8.4f; f = %8.4f\n'],parameter_names{i},x(i),f0)
             end
         end
         xh1=x;
     end
-    x = check_bounds(x,bounds);
+    % bound checks are now applied inside the per-parameter update branches above
     if Save_files
         save('gstep.mat','x','h1','f0')
     end
diff --git a/matlab/optimization/newrat.m b/matlab/optimization/newrat.m
index 00b0924e9dc24e364dfe732e6e2a113abeae76a1..68a98987926826b8bbdc5c3b03333ca31ca5e5ea 100644
--- a/matlab/optimization/newrat.m
+++ b/matlab/optimization/newrat.m
@@ -183,7 +183,7 @@ while norm(gg)>gtol && check==0 && jit<nit
             disp('last step exited with bad status!')
         end
     end
-    [fvala, x0, ig] = mr_gstep(h1,x0,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon, parameter_names,varargin{:});
+    [fvala, x0, ig] = mr_gstep(h1,x0,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon, parameter_names, hess_info.robust, varargin{:});
     if not(isequal(x0 , check_bounds(x0,bounds)))
         x0 = check_bounds(x0,bounds);
         [fvala,exit_flag]=penalty_objective_function(x0,func0,penalty,varargin{:});