Commit 1dabbd88 authored by Stéphane Adjemian
Browse files

Fixed Marco's optimization routines (mode_compute==5).

Added fs2000d.mod in the testsuite (test of Marco's optimization routines).
parent c5b2afa3
This diff is collapsed.
function [f0, x, ig] = mr_gstep(h1,x,func0,htol0,varargin)
function [f0, x, ig] = mr_gstep(h1,x,func0,htol0,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults)
% function [f0, x, ig] = mr_gstep(h1,x,func0,htol0,varargin)
%
%
% Gibbs type step in optimisation
% Copyright (C) 2006-2011 Dynare Team
......@@ -22,13 +22,12 @@ function [f0, x, ig] = mr_gstep(h1,x,func0,htol0,varargin)
n=size(x,1);
if nargin<4,
if isempty(htol0)
htol = 1.e-6;
else
htol = htol0;
end
func = str2func(func0);
f0=feval(func,x,varargin{:});
f0=feval(func0,x,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
xh1=x;
f1=zeros(size(f0,1),n);
......@@ -36,37 +35,29 @@ f_1=f1;
i=0;
ig=zeros(n,1);
while i<n,
while i<n
i=i+1;
h10=h1(i);
hcheck=0;
dx=[];
xh1(i)=x(i)+h1(i);
fx = feval(func,xh1,varargin{:});
fx = feval(func0,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
f1(:,i)=fx;
xh1(i)=x(i)-h1(i);
fx = feval(func,xh1,varargin{:});
fx = feval(func0,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
f_1(:,i)=fx;
if hcheck && htol<1,
if hcheck && htol<1
htol=min(1,max(min(abs(dx))*2,htol*10));
h1(i)=h10;
xh1(i)=x(i);
i=i-1;
else
gg=zeros(size(x));
gg=zeros(size(x));
hh=gg;
gg(i)=(f1(i)'-f_1(i)')./(2.*h1(i));
hh(i) = 1/max(1.e-9,abs( (f1(i)+f_1(i)-2*f0)./(h1(i)*h1(i)) ));
% if abs(f1(i)+f_1(i)-2*f0)>1.e-12,
% hh(i) = abs(1/( (f1(i)+f_1(i)-2*f0)./(h1(i)*h1(i)) ));
% else
% hh(i) = 1;
% end
if gg(i)*(hh(i)*gg(i))/2 > htol,
[f0 x fc retcode] = csminit(func0,x,f0,gg,0,diag(hh),varargin{:});
if gg(i)*(hh(i)*gg(i))/2 > htol
[f0 x fc retcode] = csminit(func0,x,f0,gg,0,diag(hh),DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
ig(i)=1;
end
xh1=x;
......
function [hessian_mat, gg, htol1, ihh, hh_mat0, hh1] = mr_hessian(init,x,func,hflag,htol0,varargin)
function [hessian_mat, gg, htol1, ihh, hh_mat0, hh1] = mr_hessian(init,x,func,hflag,htol0,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults)
% [hessian_mat, gg, htol1, ihh, hh_mat0, hh1] = mr_hessian(init,x,func,hflag,htol0,varargin)
%
% numerical gradient and Hessian, with 'automatic' check of numerical
% error
% error
%
% adapted from Michel Juillard's original routine hessian.m
%
% func = name of the function: func must give two outputs:
% func = name of the function: func must give two outputs:
% - the log-likelihood AND the single contributions at times t=1,...,T
% of the log-likelihood to compute outer product gradient
% x = parameter values
......@@ -41,26 +41,24 @@ function [hessian_mat, gg, htol1, ihh, hh_mat0, hh1] = mr_hessian(init,x,func,hf
% You should have received a copy of the GNU General Public License
% along with Dynare. If not, see <http://www.gnu.org/licenses/>.
global options_ bayestopt_
persistent h1 htol
n=size(x,1);
if init,
gstep_=options_.gstep;
if init
gstep_=DynareOptions.gstep;
htol = 1.e-4;
%h1=max(abs(x),sqrt(gstep_)*ones(n,1))*eps^(1/4);
h1=options_.gradient_epsilon*ones(n,1);
return,
h1=DynareOptions.gradient_epsilon*ones(n,1);
return
end
func = str2func(func);
[f0, ff0]=feval(func,x,varargin{:});
h2=bayestopt_.ub-bayestopt_.lb;
hmax=bayestopt_.ub-x;
hmax=min(hmax,x-bayestopt_.lb);
[f0, ff0]=feval(func,x,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
h2=BayesInfo.ub-BayesInfo.lb;
hmax=BayesInfo.ub-x;
hmax=min(hmax,x-BayesInfo.lb);
h1 = min(h1,0.5.*hmax);
if htol0<htol,
if htol0<htol
htol=htol0;
end
xh1=x;
......@@ -71,24 +69,22 @@ ff_1=ff1;
ggh=zeros(size(ff0,1),n);
i=0;
while i<n,
while i<n
i=i+1;
h10=h1(i);
hcheck=0;
xh1(i)=x(i)+h1(i);
try
[fx, ffx]=feval(func,xh1,varargin{:});
[fx, ffx]=feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
catch
fx=1.e8;
end
it=1;
dx=(fx-f0);
ic=0;
icount = 0;
h0=h1(i);
while (abs(dx(it))<0.5*htol || abs(dx(it))>(3*htol)) && icount<10 && ic==0,
%while abs(dx(it))<0.5*htol && icount< 10 && ic==0,
while (abs(dx(it))<0.5*htol || abs(dx(it))>(3*htol)) && icount<10 && ic==0
icount=icount+1;
if abs(dx(it))<0.5*htol
if abs(dx(it)) ~= 0,
......@@ -99,51 +95,51 @@ while i<n,
h1(i) = min(h1(i),0.5*hmax(i));
xh1(i)=x(i)+h1(i);
try
[fx, ffx]=feval(func,xh1,varargin{:});
[fx, ffx]=feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
catch
fx=1.e8;
end
end
if abs(dx(it))>(3*htol),
if abs(dx(it))>(3*htol)
h1(i)= htol/abs(dx(it))*h1(i);
xh1(i)=x(i)+h1(i);
try
[fx, ffx]=feval(func,xh1,varargin{:});
[fx, ffx]=feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
catch
fx=1.e8;
end
while (fx-f0)==0,
while (fx-f0)==0
h1(i)= h1(i)*2;
xh1(i)=x(i)+h1(i);
[fx, ffx]=feval(func,xh1,varargin{:});
[fx, ffx]=feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
ic=1;
end
end
it=it+1;
dx(it)=(fx-f0);
h0(it)=h1(i);
if (h1(i)<1.e-12*min(1,h2(i)) && h1(i)<0.5*hmax(i)),% || (icount==10 && abs(dx(it))>(3*htol)),
if (h1(i)<1.e-12*min(1,h2(i)) && h1(i)<0.5*hmax(i))% || (icount==10 && abs(dx(it))>(3*htol)),
ic=1;
hcheck=1;
end
end
f1(:,i)=fx;
if any(isnan(ffx)),
if any(isnan(ffx))
ff1=ones(size(ff0)).*fx/length(ff0);
else
ff1=ffx;
end
xh1(i)=x(i)-h1(i);
[fx, ffx]=feval(func,xh1,varargin{:});
[fx, ffx]=feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
f_1(:,i)=fx;
if any(isnan(ffx)),
if any(isnan(ffx))
ff_1=ones(size(ff0)).*fx/length(ff0);
else
ff_1=ffx;
end
ggh(:,i)=(ff1-ff_1)./(2.*h1(i));
xh1(i)=x(i);
if hcheck && htol<1,
if hcheck && htol<1
htol=min(1,max(min(abs(dx))*2,htol*10));
h1(i)=h10;
i=0;
......@@ -157,14 +153,14 @@ xh_1=xh1;
gg=(f1'-f_1')./(2.*h1);
if hflag==2,
if hflag==2
gg=(f1'-f_1')./(2.*h1);
hessian_mat = zeros(size(f0,1),n*n);
for i=1:n
if i > 1
k=[i:n:n*(i-1)];
hessian_mat(:,(i-1)*n+1:(i-1)*n+i-1)=hessian_mat(:,k);
end
end
hessian_mat(:,(i-1)*n+i)=(f1(:,i)+f_1(:,i)-2*f0)./(h1(i)*h_1(i));
temp=f1+f_1-f0*ones(1,n);
for j=i+1:n
......@@ -172,10 +168,8 @@ if hflag==2,
xh1(j)=x(j)+h_1(j);
xh_1(i)=x(i)-h1(i);
xh_1(j)=x(j)-h_1(j);
temp1 = feval(func,xh1,varargin{:});
temp2 = feval(func,xh_1,varargin{:});
temp1 = feval(func,xh1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
temp2 = feval(func,xh_1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
hessian_mat(:,(i-1)*n+j)=-(-temp1 -temp2+temp(:,i)+temp(:,j))./(2*h1(i)*h_1(j));
xh1(i)=x(i);
xh1(j)=x(j);
......@@ -186,27 +180,25 @@ if hflag==2,
end
i=i+1;
end
elseif hflag==1,
elseif hflag==1
hessian_mat = zeros(size(f0,1),n*n);
for i=1:n,
for i=1:n
dum = (f1(:,i)+f_1(:,i)-2*f0)./(h1(i)*h_1(i));
if dum>eps,
if dum>eps
hessian_mat(:,(i-1)*n+i)=dum;
else
hessian_mat(:,(i-1)*n+i)=max(eps, gg(i)^2);
end
end
end
%hessian_mat2=hh_mat(:)';
end
gga=ggh.*kron(ones(size(ff1)),2.*h1'); % re-scaled gradient
hh_mat=gga'*gga; % rescaled outer product hessian
hh_mat=gga'*gga; % rescaled outer product hessian
hh_mat0=ggh'*ggh; % outer product hessian
A=diag(2.*h1); % rescaling matrix
% igg=inv(hh_mat); % inverted rescaled outer product hessian
ihh=A'*(hh_mat\A); % inverted outer product hessian
if hflag>0 && min(eig(reshape(hessian_mat,n,n)))>0,
if hflag>0 && min(eig(reshape(hessian_mat,n,n)))>0
hh0 = A*reshape(hessian_mat,n,n)*A'; %rescaled second order derivatives
hh = reshape(hessian_mat,n,n); %rescaled second order derivatives
sd0=sqrt(diag(hh0)); %rescaled 'standard errors' using second order derivatives
......@@ -217,10 +209,9 @@ if hflag>0 && min(eig(reshape(hessian_mat,n,n)))>0,
hh_mat0=inv(A)'*hh_mat*inv(A); % outer product hessian with 'true' std's
sd=sqrt(diag(ihh)); %standard errors
sdh=sqrt(1./diag(hh)); %diagonal standard errors
for j=1:length(sd),
sd0(j,1)=min(bayestopt_.p2(j), sd(j)); %prior std
for j=1:length(sd)
sd0(j,1)=min(BayesInfo.p2(j), sd(j)); %prior std
sd0(j,1)=10^(0.5*(log10(sd0(j,1))+log10(sdh(j,1))));
%sd0(j,1)=0.5*(sd0(j,1)+sdh(j,1));
end
ihh=ihh./(sd*sd').*(sd0*sd0'); %inverse outer product with modified std's
igg=inv(A)'*ihh*inv(A); % inverted rescaled outer product hessian with modified std's
......@@ -233,18 +224,15 @@ if hflag>0 && min(eig(reshape(hessian_mat,n,n)))>0,
% ihh=A'*igg*A; % inverted outer product hessian
% hh_mat0=inv(A)'*hh_mat*inv(A); % outer product hessian with 'true' std's
end
if hflag<2,
if hflag<2
hessian_mat=hh_mat0(:);
end
if any(isnan(hessian_mat)),
if any(isnan(hessian_mat))
hh_mat0=eye(length(hh_mat0));
ihh=hh_mat0;
hessian_mat=hh_mat0(:);
hessian_mat=hh_mat0(:);
end
hh1=h1;
htol1=htol;
save hess.mat
% 11/25/03 SA Created from Hessian_sparse (removed sparse)
save hess.mat
\ No newline at end of file
function [xparam1, hh, gg, fval, igg] = newrat(func0, x, hh, gg, igg, ftol0, nit, flagg, varargin)
function [xparam1, hh, gg, fval, igg] = newrat(func0, x, hh, gg, igg, ftol0, nit, flagg, DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults)
% [xparam1, hh, gg, fval, igg] = newrat(func0, x, hh, gg, igg, ftol0, nit, flagg, varargin)
%
% Optimiser with outer product gradient and with sequences of univariate steps
......@@ -13,17 +13,17 @@ function [xparam1, hh, gg, fval, igg] = newrat(func0, x, hh, gg, igg, ftol0, nit
% hh = initial Hessian [OPTIONAL]
% gg = initial gradient [OPTIONAL]
% igg = initial inverse Hessian [OPTIONAL]
% ftol0 = ending criterion for function change
% ftol0 = ending criterion for function change
% nit = maximum number of iterations
%
% In each iteration, Hessian is computed with outer product gradient.
% for final Hessian (to start Metropolis):
% flagg = 0, final Hessian computed with outer product gradient
% flagg = 1, final 'mixed' Hessian: diagonal elements computed with numerical second order derivatives
% with correlation structure as from outer product gradient,
% with correlation structure as from outer product gradient,
% flagg = 2, full numerical Hessian
%
% varargin = list of parameters for func0
% varargin = list of parameters for func0
% Copyright (C) 2004-2011 Dynare Team
%
......@@ -42,7 +42,6 @@ function [xparam1, hh, gg, fval, igg] = newrat(func0, x, hh, gg, igg, ftol0, nit
% You should have received a copy of the GNU General Public License
% along with Dynare. If not, see <http://www.gnu.org/licenses/>.
global bayestopt_
icount=0;
nx=length(x);
xparam1=x;
......@@ -53,29 +52,27 @@ ftol=ftol0;
gtol=1.e-3;
htol=htol_base;
htol0=htol_base;
gibbstol=length(bayestopt_.pshape)/50; %25;
gibbstol=length(BayesInfo.pshape)/50; %25;
func_hh = [func0,'_hh'];
func = str2func(func0);
fval0=feval(func,x,varargin{:});
func_hh = str2func([func2str(func0),'_hh']);
fval0=feval(func0,x,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
fval=fval0;
% initialize mr_gstep and mr_hessian
% mr_gstep(1,x);
mr_hessian(1,x);
mr_hessian(1,x,[],[],[],DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if isempty(hh)
[dum, gg, htol0, igg, hhg, h1]=mr_hessian(0,x,func_hh,flagit,htol,varargin{:});
[dum, gg, htol0, igg, hhg, h1]=mr_hessian(0,x,func_hh,flagit,htol,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
hh0 = reshape(dum,nx,nx);
hh=hhg;
if min(eig(hh0))<0,
if min(eig(hh0))<0
hh0=hhg; %generalized_cholesky(hh0);
elseif flagit==2,
elseif flagit==2
hh=hh0;
igg=inv(hh);
end
if htol0>htol,
if htol0>htol
htol=htol0;
%ftol=htol0;
end
else
hh0=hh;
......@@ -99,73 +96,67 @@ jit=0;
nig=[];
ig=ones(nx,1);
ggx=zeros(nx,1);
while norm(gg)>gtol && check==0 && jit<nit,
while norm(gg)>gtol && check==0 && jit<nit
jit=jit+1;
tic
icount=icount+1;
bayestopt_.penalty = fval0(icount);
disp([' '])
disp(['Iteration ',num2str(icount)])
[fval x0 fc retcode] = csminit(func0,xparam1,fval0(icount),gg,0,H,varargin{:});
if igrad,
[fval1 x01 fc retcode1] = csminit(func0,x0,fval,gg,0,inx,varargin{:});
if (fval-fval1)>1, %(fval0(icount)-fval),
[fval,x0,fc,retcode] = csminit1(func0,xparam1,fval0(icount),gg,0,H,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if igrad
[fval1,x01,fc,retcode1] = csminit1(func0,x0,fval,gg,0,inx,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if (fval-fval1)>1
disp('Gradient step!!')
else
igrad=0;
end
fval=fval1;
x0=x01;
x0=x01;
end
if (fval0(icount)-fval)<1.e-2*(gg'*(H*gg))/2 && igibbs,
if length(find(ig))<nx,
if (fval0(icount)-fval)<1.e-2*(gg'*(H*gg))/2 && igibbs
if length(find(ig))<nx
ggx=ggx*0;
ggx(find(ig))=gg(find(ig));
hhx = reshape(dum,nx,nx);
iggx=eye(length(gg));
iggx(find(ig),find(ig)) = inv( hhx(find(ig),find(ig)) );
[fvala x0 fc retcode] = csminit(func0,x0,fval,ggx,0,iggx,varargin{:});
[fvala,x0,fc,retcode] = csminit1(func0,x0,fval,ggx,0,iggx,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
end
[fvala, x0, ig] = mr_gstep(h1,x0,func0,htol,varargin{:});
% if length(find(ig))==0,
% [fvala, x0, ig] = mr_gstep(h1,x0,func0,htol/10,varargin{:});
% end
[fvala, x0, ig] = mr_gstep(h1,x0,func0,htol,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
nig=[nig ig];
disp('Sequence of univariate steps!!')
fval=fvala;
end
if (fval0(icount)-fval)<ftol && flagit==0,
if (fval0(icount)-fval)<ftol && flagit==0
disp('Try diagonal Hessian')
ihh=diag(1./(diag(hhg)));
[fval2 x0 fc retcode2] = csminit(func2str(func),x0,fval,gg,0,ihh,varargin{:});
if (fval-fval2)>=ftol ,
%hh=diag(diag(hh));
disp('Diagonal Hessian successful')
ihh=diag(1./(diag(hhg)));
[fval2,x0,fc,retcode2] = csminit1(func0,x0,fval,gg,0,ihh,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if (fval-fval2)>=ftol
disp('Diagonal Hessian successful')
end
fval=fval2;
end
if (fval0(icount)-fval)<ftol && flagit==0,
end
if (fval0(icount)-fval)<ftol && flagit==0
disp('Try gradient direction')
ihh0=inx.*1.e-4;
[fval3 x0 fc retcode3] = csminit(func2str(func),x0,fval,gg,0,ihh0,varargin{:});
if (fval-fval3)>=ftol ,
%hh=hh0;
%ihh=ihh0;
disp('Gradient direction successful')
ihh0=inx.*1.e-4;
[fval3,x0,fc,retcode3] = csminit1(func0,x0,fval,gg,0,ihh0,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if (fval-fval3)>=ftol
disp('Gradient direction successful')
end
fval=fval3;
end
end
xparam1=x0;
x(:,icount+1)=xparam1;
fval0(icount+1)=fval;
if (fval0(icount)-fval)<ftol,
if (fval0(icount)-fval)<ftol
disp('No further improvement is possible!')
check=1;
if flagit==2,
if flagit==2
hh=hh0;
elseif flagg>0,
[dum, gg, htol0, igg, hhg,h1]=mr_hessian(0,xparam1,func_hh,flagg,ftol0,varargin{:});
if flagg==2,
elseif flagg>0
[dum, gg, htol0, igg, hhg,h1]=mr_hessian(0,xparam1,func_hh,flagg,ftol0,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if flagg==2
hh = reshape(dum,nx,nx);
ee=eig(hh);
if min(ee)<0
......@@ -186,48 +177,38 @@ while norm(gg)>gtol && check==0 && jit<nit,
disp(['Maximum Hessian eigenvalue ',num2str(max(ee))])
g(:,icount+1)=gg;
else
df = fval0(icount)-fval;
disp(['Actual dxnorm ',num2str(norm(x(:,end)-x(:,end-1)))])
disp(['FVAL ',num2str(fval)])
disp(['Improvement ',num2str(df)])
disp(['Ftol ',num2str(ftol)])
disp(['Htol ',num2str(htol0)])
% if df<htol0,
% htol=max(htol_base,df/10);
% end
htol=htol_base;
if norm(x(:,icount)-xparam1)>1.e-12,
try
if norm(x(:,icount)-xparam1)>1.e-12
try
save m1.mat x fval0 nig -append
catch
save m1.mat x fval0 nig
save m1.mat x fval0 nig
end
[dum, gg, htol0, igg, hhg, h1]=mr_hessian(0,xparam1,func_hh,flagit,htol,varargin{:});
if htol0>htol, %ftol,
%ftol=htol0;
[dum, gg, htol0, igg, hhg, h1]=mr_hessian(0,xparam1,func_hh,flagit,htol,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
if htol0>htol
htol=htol0;
disp(' ')
disp('Numerical noise in the likelihood')
disp('Tolerance has to be relaxed')
disp(' ')
% elseif htol0<ftol,
% ftol=max(htol0, ftol0);
end
hh0 = reshape(dum,nx,nx);
hh=hhg;
if flagit==2,
if min(eig(hh0))<=0,
if flagit==2
if min(eig(hh0))<=0
hh0=hhg; %generalized_cholesky(hh0);
else
else
hh=hh0;
igg=inv(hh);
end
end
end
disp(['Gradient norm ',num2str(norm(gg))])
ee=eig(hh);
disp(['Minimum Hessian eigenvalue ',num2str(min(ee))])
......@@ -235,29 +216,27 @@ while norm(gg)>gtol && check==0 && jit<nit,
if max(eig(hh))<0, disp('Negative definite Hessian! Local maximum!'), pause, end,
t=toc;
disp(['Elapsed time for iteration ',num2str(t),' s.'])
g(:,icount+1)=gg;
% H = bfgsi(H,g(:,end)-g(:,end-1),x(:,end)-x(:,end-1));
H = igg;
save m1.mat x hh g hhg igg fval0 nig H
end
end
save m1.mat x hh g hhg igg fval0 nig
if ftol>ftol0,
if ftol>ftol0
disp(' ')
disp('Numerical noise in the likelihood')
disp('Tolerance had to be relaxed')
disp(' ')
end
if jit==nit,
if jit==nit
disp(' ')
disp('Maximum number of iterations reached')
disp(' ')
end
if norm(gg)<=gtol,
if norm(gg)<=gtol
disp(['Estimation ended:'])
disp(['Gradient norm < ', num2str(gtol)])
end
......@@ -267,15 +246,7 @@ end
return
%
function f00 = lsearch(lam,func,x,dx,varargin)
x0=x-dx*lam;
f00=feval(func,x0,varargin{:});
function f00 = lsearch(lam,func,x,dx,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults)
x0=x-dx*lam;
f00=feval(func,x0,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults);
\ No newline at end of file
......@@ -48,6 +48,7 @@ MODFILES = \
fs2000/fs2000.mod \
fs2000/fs2000a.mod \
fs2000/fs2000c.mod \
fs2000/fs2000d.mod \