function dynare_estimation_1(var_list_,dname)
% function dynare_estimation_1(var_list_,dname)
% runs the estimation of the model
4
%
5
6
7
% INPUTS
%   var_list_:  selected endogenous variables vector
%   dname:      alternative directory name
8
%
9
10
11
12
13
14
% OUTPUTS
%   none
%
% SPECIAL REQUIREMENTS
%   none

15
% Copyright (C) 2003-2013 Dynare Team
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
%
% This file is part of Dynare.
%
% Dynare is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% Dynare is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with Dynare.  If not, see <http://www.gnu.org/licenses/>.

32
global M_ options_ oo_ estim_params_ bayestopt_ dataset_
33

34
35
% Set particle filter flag.
if options_.order > 1
36
    if options_.particle.status && options_.order==2
37
        skipline()
38
        disp('Estimation using a non linear filter!')
39
        skipline()
40
        if ~options_.nointeractive && ismember(options_.mode_compute,[1,3,4]) % Known gradient-based optimizers
41
42
            disp('You are using a gradient-based mode-finder. Particle filtering introduces discontinuities in the') 
            disp('objective function w.r.t the parameters. Thus, should use a non-gradient based optimizer.')
43
            fprintf('\nPlease choose a mode-finder:\n')
44
            fprintf('\t 0 - Continue using gradient-based method (it is most likely that you will no get any sensible result).\n')
45
            fprintf('\t 6 - Monte Carlo based algorithm\n')
46
47
            fprintf('\t 7 - Nelder-Mead simplex based optimization routine (Matlab optimization toolbox required)\n')
            fprintf('\t 8 - Nelder-Mead simplex based optimization routine (Dynare''s implementation)\n')
48
            fprintf('\t 9 - CMA-ES (Covariance Matrix Adaptation Evolution Strategy) algorithm\n')
49
50
51
52
53
54
55
56
57
58
59
            choice = [];
            while isempty(choice)
                choice = input('Please enter your choice: ');
                if isnumeric(choice) && isint(choice) && ismember(choice,[0 6 7 8 9])
                    if choice
                        options_.mode_compute = choice;
                    end
                else
                    fprintf('\nThis is an invalid choice (you have to choose between 0, 6, 7, 8 and 9).\n')
                    choice = [];
                end
60
61
            end
        end
62
    elseif options_.particle.status && options_.order>2
63
64
        error(['Non linear filter are not implemented with order ' int2str(options_.order) ' approximation of the model!'])
    elseif ~options_.particle.status && options_.order==2
65
        error('For estimating the model with a second order approximation using a non linear filter, one should have options_.particle.status=1;')
66
67
68
    else
        error(['Cannot estimate a model with an order ' int2str(options_.order) ' approximation!'])
    end
69
70
end

71
if ~options_.dsge_var
72
    if options_.particle.status
73
74
75
76
        objective_function = str2func('non_linear_dsge_likelihood');
    else
        objective_function = str2func('dsge_likelihood');
    end
77
78
79
80
else
    objective_function = str2func('DsgeVarLikelihood');
end

81
[dataset_,xparam1, hh, M_, options_, oo_, estim_params_,bayestopt_] = dynare_estimation_init(var_list_, dname, [], M_, options_, oo_, estim_params_, bayestopt_);
82

83
% Set sigma_e_is_diagonal flag (needed if the shocks block is not declared in the mod file).
84
M_.sigma_e_is_diagonal = 1;
85
if estim_params_.ncx || any(nnz(tril(M_.Sigma_e,-1)))
86
87
88
    M_.sigma_e_is_diagonal = 0;
end

89
90
91
% Set the correlation matrix if necessary.
if ~isequal(estim_params_.ncx,nnz(tril(M_.Sigma_e,-1)))
    M_.Correlation_matrix = diag(1./sqrt(diag(M_.Sigma_e)))*M_.Sigma_e*diag(1./sqrt(diag(M_.Sigma_e)));
92
93
94
95
96
97
98
99
    % Remove NaNs appearing because of variances calibrated to zero.
    if any(isnan(M_.Correlation_matrix))
        zero_variance_idx = find(~diag(M_.Sigma_e));
        for i=1:length(zero_variance_idx)
            M_.Correlation_matrix(zero_variance_idx(i),:) = 0;
            M_.Correlation_matrix(:,zero_variance_idx(i)) = 0;
        end
    end
100
101
end

102
103
104
105
106
107
108
109
110
111
112
113
114
% Set the correlation matrix of measurement errors if necessary.
if ~isequal(estim_params_.ncn,nnz(tril(M_.H,-1)))
    M_.Correlation_matrix_ME = diag(1./sqrt(diag(M_.H)))*M_.H*diag(1./sqrt(diag(M_.H)));
    % Remove NaNs appearing because of variances calibrated to zero.
    if any(isnan(M_.Correlation_matrix_ME))
        zero_variance_idx = find(~diag(M_.H));
        for i=1:length(zero_variance_idx)
            M_.Correlation_matrix_ME(zero_variance_idx(i),:) = 0;
            M_.Correlation_matrix_ME(:,zero_variance_idx(i)) = 0;
        end
    end
end

115
116
data = dataset_.data;
rawdata = dataset_.rawdata;
117
118
data_index = dataset_.missing.aindex;
missing_value = dataset_.missing.state;
119

120
% Set number of observations
121
gend = options_.nobs;
122
% Set the number of observed variables.
123
n_varobs = size(options_.varobs,1);
124
% Get the number of parameters to be estimated.
125
126
127
128
129
130
nvx = estim_params_.nvx;  % Variance of the structural innovations (number of parameters).
nvn = estim_params_.nvn;  % Variance of the measurement innovations (number of parameters).
ncx = estim_params_.ncx;  % Covariance of the structural innovations (number of parameters).
ncn = estim_params_.ncn;  % Covariance of the measurement innovations (number of parameters).
np  = estim_params_.np ;  % Number of deep parameters.
nx  = nvx+nvn+ncx+ncn+np; % Total number of parameters to be estimated.
131
132
133
134
135
%% Set the names of the priors.
pnames = ['     ';'beta ';'gamm ';'norm ';'invg ';'unif ';'invg2'];
%% Set parameters bounds
lb = bayestopt_.lb;
ub = bayestopt_.ub;
136

137
dr = oo_.dr;
138

139
140
141
142
143
144
145
146
147
148
149
150
151
152
%% load optimal_mh_scale parameter if previous run was with
%% mode_compute=6
mh_scale_fname = [M_.fname '_optimal_mh_scale_parameter.mat'];
if exist(mh_scale_fname)
    if options_.mode_compute == 0
        tmp = load(mh_scale_fname,'Scale');
        bayestopt_.mh_jscale = tmp.Scale;
        clear tmp;
    else
        % remove the file if mode_compute ~= 0
        delete('mh_scale_fname')
    end
end

153
154
155
if ~isempty(estim_params_)
    set_parameters(xparam1);
end
156

157
% compute sample moments if needed (bvar-dsge)
158
if options_.dsge_var
159
    if dataset_.missing.state
160
161
162
163
164
        error('I cannot estimate a DSGE-VAR model with missing observations!')
    end
    if options_.noconstant
        evalin('base',...
               ['[mYY,mXY,mYX,mXX,Ydata,Xdata] = ' ...
165
                'var_sample_moments(options_.first_obs,' ...
166
                'options_.first_obs+options_.nobs-1,options_.dsge_varlag,-1,' ...
167
168
169
170
                'options_.datafile, options_.varobs,options_.xls_sheet,options_.xls_range);'])
    else% The steady state is non zero ==> a constant in the VAR is needed!
        evalin('base',['[mYY,mXY,mYX,mXX,Ydata,Xdata] = ' ...
                       'var_sample_moments(options_.first_obs,' ...
171
                       'options_.first_obs+options_.nobs-1,options_.dsge_varlag,0,' ...
172
173
174
175
176
                       'options_.datafile, options_.varobs,options_.xls_sheet,options_.xls_range);'])
    end
end


177
oo_ = initial_estimation_checks(objective_function,xparam1,dataset_,M_,estim_params_,options_,bayestopt_,oo_);
178

179
if isequal(options_.mode_compute,0) && isempty(options_.mode_file) && options_.mh_posterior_mode_estimation==0
180
    if options_.smoother == 1
181
        [atT,innov,measurement_error,updated_variables,ys,trend_coeff,aK,T,R,P,PK,decomp] = DsgeSmoother(xparam1,gend,data,data_index,missing_value);
182
183
        oo_.Smoother.SteadyState = ys;
        oo_.Smoother.TrendCoeffs = trend_coeff;
184
        if options_.filter_covariance
185
            oo_.Smoother.Variance = P;
186
        end
187
        i_endo = bayestopt_.smoother_saved_var_list;
188
        if options_.nk ~= 0
189
190
191
192
193
194
195
196
197
198
            oo_.FilteredVariablesKStepAhead = ...
                aK(options_.filter_step_ahead,i_endo,:);
            if ~isempty(PK)
                oo_.FilteredVariablesKStepAheadVariances = ...
                    PK(options_.filter_step_ahead,i_endo,i_endo,:);
            end
            if ~isempty(decomp)
                oo_.FilteredVariablesShockDecomposition = ...
                    decomp(options_.filter_step_ahead,i_endo,:,:);
            end
199
        end
200
        for i=bayestopt_.smoother_saved_var_list'
201
            i1 = dr.order_var(bayestopt_.smoother_var_list(i));
202
203
204
205
206
207
            eval(['oo_.SmoothedVariables.' deblank(M_.endo_names(i1,:)) ...
                  ' = atT(i,:)'';']);
            if options_.nk > 0
                eval(['oo_.FilteredVariables.' deblank(M_.endo_names(i1,:)) ...
                      ' = squeeze(aK(1,i,:));']);
            end
208
            eval(['oo_.UpdatedVariables.' deblank(M_.endo_names(i1,:)) ' = updated_variables(i,:)'';']);
209
210
211
212
        end
        for i=1:M_.exo_nbr
            eval(['oo_.SmoothedShocks.' deblank(M_.exo_names(i,:)) ' = innov(i,:)'';']);
        end
213
    end
214
    return
215
216
end

217
% Estimation of the posterior mode or likelihood mode
218
if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation
219
220
    switch options_.mode_compute
      case 1
221
222
223
224
225
        if exist('OCTAVE_VERSION')
            error('Option mode_compute=1 is not available under Octave')
        elseif ~user_has_matlab_license('optimization_toolbox')
            error('Option mode_compute=1 requires the Optimization Toolbox')
        end
226
227
        % Set default optimization options for fmincon.
        optim_options = optimset('display','iter', 'LargeScale','off', 'MaxFunEvals',100000, 'TolFun',1e-8, 'TolX',1e-6);
228
229
        if isfield(options_,'optim_opt')
            eval(['optim_options = optimset(optim_options,' options_.optim_opt ');']);
230
231
        end
        if options_.analytic_derivation,
232
            optim_options = optimset(optim_options,'GradObj','on','TolX',1e-7);
233
        end
234
235
        [xparam1,fval,exitflag,output,lamdba,grad,hessian_fmincon] = ...
            fmincon(objective_function,xparam1,[],[],[],[],lb,ub,[],optim_options,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
236
237
238
      case 2
        error('ESTIMATION: mode_compute=2 option (Lester Ingber''s Adaptive Simulated Annealing) is no longer available')
      case 3
239
240
241
242
243
        if exist('OCTAVE_VERSION') && ~user_has_octave_forge_package('optim')
            error('Option mode_compute=3 requires the optim package')
        elseif ~exist('OCTAVE_VERSION') && ~user_has_matlab_license('optimization_toolbox')
            error('Option mode_compute=3 requires the Optimization Toolbox')
        end
244
        % Set default optimization options for fminunc.
245
246
247
248
        optim_options = optimset('display','iter','MaxFunEvals',100000,'TolFun',1e-8,'TolX',1e-6);
        if isfield(options_,'optim_opt')
            eval(['optim_options = optimset(optim_options,' options_.optim_opt ');']);
        end
249
250
251
        if options_.analytic_derivation,
            optim_options = optimset(optim_options,'GradObj','on');
        end
252
253
254
255
256
257
258
        if ~exist('OCTAVE_VERSION')
            [xparam1,fval,exitflag] = fminunc(objective_function,xparam1,optim_options,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
        else
            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
            func = @(x) objective_function(x, dataset_,options_,M_,estim_params_,bayestopt_,oo_);
            [xparam1,fval,exitflag] = fminunc(func,xparam1,optim_options);
        end
259
      case 4
260
        % Set default options.
261
262
263
264
        H0 = 1e-4*eye(nx);
        crit = 1e-7;
        nit = 1000;
        verbose = 2;
265
266
267
268
269
270
271
272
273
274
275
        numgrad = options_.gradient_method;
        epsilon = options_.gradient_epsilon;
        % Change some options.
        if isfield(options_,'optim_opt')
            options_list = strsplit(options_.optim_opt,',');
            number_of_options = length(options_list)/2;
            o = 1;
            while o<=number_of_options
                switch strtrim(options_list{2*(o-1)+1})
                  case '''MaxIter'''
                    nit = str2num(options_list{2*(o-1)+2});
276
                  case '''InitialInverseHessian'''
277
                    H0 = eval(eval(options_list{2*(o-1)+2}));
278
                  case '''TolFun'''
279
280
281
282
283
284
                    crit = str2double(options_list{2*(o-1)+2});
                  case '''NumgradAlgorithm'''
                    numgrad = str2num(options_list{2*(o-1)+2});
                  case '''NumgradEpsilon'''
                    epsilon = str2double(options_list{2*(o-1)+2});
                  otherwise
285
                    warning(['csminwel: Unknown option (' options_list{2*(o-1)+1}  ')!'])
286
287
288
289
290
291
                end
                o = o + 1;
            end
        end
        % Set flag for analytical gradient.
        if options_.analytic_derivation
292
293
294
295
            analytic_grad=1;
        else
            analytic_grad=[];
        end
296
297
298
299
300
        % Call csminwell.
        [fval,xparam1,grad,hessian_csminwel,itct,fcount,retcodehat] = ...
            csminwel1(objective_function, xparam1, H0, analytic_grad, crit, nit, numgrad, epsilon, dataset_, options_, M_, estim_params_, bayestopt_, oo_);
        % Disp value at the mode.
        disp(sprintf('Objective function at mode: %f',fval))
301
302
303
304
305
306
307
308
309
      case 5
        if isfield(options_,'hess')
            flag = options_.hess;
        else
            flag = 1;
        end
        if isfield(options_,'ftol')
            crit = options_.ftol;
        else
310
            crit = 1.e-5;
311
        end
312
313
314
315
316
317
318
319
320
        if options_.analytic_derivation,
            analytic_grad=1;
            ana_deriv = options_.analytic_derivation;
            options_.analytic_derivation = -1;
            crit = 1.e-7;
            flag = 0;
        else
            analytic_grad=0;
        end
321
322
323
324
325
        if isfield(options_,'nit')
            nit = options_.nit;
        else
            nit=1000;
        end
326
327
328
329
        [xparam1,hh,gg,fval,invhess] = newrat(objective_function,xparam1,analytic_grad,crit,nit,flag,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
        if options_.analytic_derivation,
            options_.analytic_derivation = ana_deriv;
        end
330
331
332
        parameter_names = bayestopt_.name;
        save([M_.fname '_mode.mat'],'xparam1','hh','gg','fval','invhess','parameter_names');
      case 6
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
        % Set default options
        gmhmaxlikOptions = options_.gmhmaxlik;
        if ~isempty(hh);
            gmhmaxlikOptions.varinit = 'previous';
        else
            gmhmaxlikOptions.varinit = 'prior';
        end
        if isfield(options_,'optim_opt')
            options_list = strsplit(options_.optim_opt,',');
            number_of_options = length(options_list)/2;
            o = 1;
            while o<=number_of_options
                switch strtrim(options_list{2*(o-1)+1})
                  case '''NumberOfMh'''
                    gmhmaxlikOptions.iterations = str2num(options_list{2*(o-1)+2});
                  case '''ncov-mh'''
                    gmhmaxlikOptions.number = str2num(options_list{2*(o-1)+2});
                  case '''nscale'''
                    gmhmaxlikOptions.nscale = str2double(options_list{2*(o-1)+2});
                  case '''nclimb'''
                    gmhmaxlikOptions.nclimb = str2num(options_list{2*(o-1)+2});
                  case '''InitialCovarianceMatrix'''
                    switch eval(options_list{2*(o-1)+2})
                      case 'previous'
                        if isempty(hh)
                            error('gmhmaxlik: No previous estimate of the Hessian matrix available!')
                        else
                            gmhmaxlikOptions.varinit = 'previous'
                        end
                      case {'prior', 'identity'}
                        gmhmaxlikOptions.varinit = eval(options_list{2*(o-1)+2});
                      otherwise
                        error('gmhmaxlik: Unknown value for option ''InitialCovarianceMatrix''!')
                    end
                  case '''AcceptanceRateTarget'''
                    gmhmaxlikOptions.target = str2num(options_list{2*(o-1)+2});
                    if gmhmaxlikOptions.target>1 || gmhmaxlikOptions.target<eps
                        error('gmhmaxlik: The value of option AcceptanceRateTarget should be a double between 0 and 1!')
                    end
                  otherwise
                    Warning(['gmhmaxlik: Unknown option (' options_list{2*(o-1)+1}  ')!'])
                end
                o = o + 1;
            end
        end        
        % Evaluate the objective function.
379
        fval = feval(objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
380
        OldMode = fval;
381
        if ~exist('MeanPar','var')
382
383
            MeanPar = xparam1;
        end
384
385
        switch gmhmaxlikOptions.varinit
          case 'previous'
386
            CovJump = inv(hh);
387
388
          case 'prior'
            % The covariance matrix is initialized with the prior
389
            % covariance (a diagonal matrix) %%Except for infinite variances ;-)
390
391
392
393
394
395
396
397
398
399
            stdev = bayestopt_.p2;
            indx = find(isinf(stdev));
            stdev(indx) = ones(length(indx),1)*sqrt(10);
            vars = stdev.^2;
            CovJump = diag(vars);
          case 'identity'
            vars = ones(length(bayestopt_.p2),1)*0.1;
            CovJump = diag(vars);
          otherwise
            error('gmhmaxlik: This is a bug! Please contact the developers.')
400
401
402
        end
        OldPostVar = CovJump;
        Scale = options_.mh_jscale;
403
        for i=1:gmhmaxlikOptions.iterations
404
            if i == 1
405
                if gmhmaxlikOptions.iterations>1
406
407
408
409
                    flag = '';
                else
                    flag = 'LastCall';
                end
410
                [xparam1,PostVar,Scale,PostMean] = ...
411
                    gmhmaxlik(objective_function,xparam1,[lb ub],gmhmaxlikOptions,Scale,flag,MeanPar,CovJump,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
412
                fval = feval(objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
413
414
                options_.mh_jscale = Scale;
                mouvement = max(max(abs(PostVar-OldPostVar)));
415
                skipline()
416
417
418
419
420
                disp('========================================================== ')
                disp(['   Change in the covariance matrix = ' num2str(mouvement) '.'])
                disp(['   Mode improvement = ' num2str(abs(OldMode-fval))])
                disp(['   New value of jscale = ' num2str(Scale)])
                disp('========================================================== ')
421
422
423
                OldMode = fval;
            else
                OldPostVar = PostVar;
424
                if i<gmhmaxlikOptions.iterations
425
426
427
428
                    flag = '';
                else
                    flag = 'LastCall';
                end
429
430
                [xparam1,PostVar,Scale,PostMean] = ...
                    gmhmaxlik(objective_function,xparam1,[lb ub],...
431
                              gmhmaxlikOptions,Scale,flag,PostMean,PostVar,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
432
                fval = feval(objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
433
434
                options_.mh_jscale = Scale;
                mouvement = max(max(abs(PostVar-OldPostVar)));
435
                fval = feval(objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
436
                skipline()
437
438
439
440
441
                disp('========================================================== ')
                disp(['   Change in the covariance matrix = ' num2str(mouvement) '.'])
                disp(['   Mode improvement = ' num2str(abs(OldMode-fval))])
                disp(['   New value of jscale = ' num2str(Scale)])
                disp('========================================================== ')
442
443
444
                OldMode = fval;
            end
            hh = inv(PostVar);
445
446
            parameter_names = bayestopt_.name;
            save([M_.fname '_mode.mat'],'xparam1','hh','parameter_names');
447
            save([M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
448
            bayestopt_.jscale = ones(length(xparam1),1)*Scale;
449
        end
450
        skipline()
451
        disp(['Optimal value of the scale parameter = ' num2str(Scale)])
452
        skipline()
453
        disp(['Final value of the log posterior (or likelihood): ' num2str(fval)])
454
        skipline()
455
456
        parameter_names = bayestopt_.name;
        save([M_.fname '_mode.mat'],'xparam1','hh','parameter_names');
457
      case 7
458
        % Matlab's simplex (Optimization toolbox needed).
459
460
461
462
463
        if exist('OCTAVE_VERSION') && ~user_has_octave_forge_package('optim')
            error('Option mode_compute=7 requires the optim package')
        elseif ~exist('OCTAVE_VERSION') && ~user_has_matlab_license('optimization_toolbox')
            error('Option mode_compute=7 requires the Optimization Toolbox')
        end
464
465
466
467
        optim_options = optimset('display','iter','MaxFunEvals',1000000,'MaxIter',6000,'TolFun',1e-8,'TolX',1e-6);
        if isfield(options_,'optim_opt')
            eval(['optim_options = optimset(optim_options,' options_.optim_opt ');']);
        end
468
        [xparam1,fval,exitflag] = fminsearch(objective_function,xparam1,optim_options,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
469
      case 8
470
        % Dynare implementation of the simplex algorithm.
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
        simplexOptions = options_.simplex;
        if isfield(options_,'optim_opt')
            options_list = strsplit(options_.optim_opt,',');
            number_of_options = length(options_list)/2;
            o = 1;
            while o<=number_of_options
                switch strtrim(options_list{2*(o-1)+1})
                  case '''MaxIter'''
                    simplexOptions.maxiter = str2num(options_list{2*(o-1)+2});
                  case '''TolFun'''
                    simplexOptions.tolerance.f = str2double(options_list{2*(o-1)+2});
                  case '''TolX'''
                    simplexOptions.tolerance.x = str2double(options_list{2*(o-1)+2});
                  case '''MaxFunEvals'''
                    simplexOptions.maxfcall = str2num(options_list{2*(o-1)+2});
                  case '''MaxFunEvalFactor'''
                    simplexOptions.maxfcallfactor = str2num(options_list{2*(o-1)+2});
                  case '''InitialSimplexSize'''
                    simplexOptions.delta_factor = str2double(options_list{2*(o-1)+2});
                  otherwise
                    warning(['simplex: Unknown option (' options_list{2*(o-1)+1}  ')!'])
                end
                o = o + 1;
            end
        end
        [xparam1,fval,exitflag] = simplex_optimization_routine(objective_function,xparam1,simplexOptions,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
497
      case 9
498
        % Set defaults
499
        H0 = 1e-4*ones(nx,1);
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
        cmaesOptions = options_.cmaes;
        % Modify defaults
        if isfield(options_,'optim_opt')
            options_list = strsplit(options_.optim_opt,',');
            number_of_options = length(options_list)/2;
            o = 1;
            while o<=number_of_options
                switch strtrim(options_list{2*(o-1)+1})
                  case '''MaxIter'''
                    cmaesOptions.MaxIter = str2num(options_list{2*(o-1)+2});
                  case '''TolFun'''
                    cmaesOptions.TolFun = str2double(options_list{2*(o-1)+2});
                  case '''TolX'''
                    cmaesOptions.TolX = str2double(options_list{2*(o-1)+2});
                  case '''MaxFunEvals'''
                    cmaesOptions.MaxFunEvals = str2num(options_list{2*(o-1)+2});
                  otherwise
                    warning(['cmaes: Unknown option (' options_list{2*(o-1)+1}  ')!'])
                end
                o = o + 1;
            end
        end
522
523
        warning('off','CMAES:NonfinitenessRange');
        warning('off','CMAES:InitialSigma');
524
        [x, fval, COUNTEVAL, STOPFLAG, OUT, BESTEVER] = cmaes(func2str(objective_function),xparam1,H0,cmaesOptions,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
525
        xparam1=BESTEVER.x;
526
        disp(sprintf('\n Objective function at mode: %f',fval))
527
      case 10
528
529
530
531
532
533
534
535
536
537
538
539
        simpsaOptions = options_.simpsa;
        if isfield(options_,'optim_opt')
            options_list = strsplit(options_.optim_opt,',');
            number_of_options = length(options_list)/2;
            o = 1;
            while o<=number_of_options
                switch strtrim(options_list{2*(o-1)+1})
                  case '''MaxIter'''
                    simpsaOptions.MAX_ITER_TOTAL = str2num(options_list{2*(o-1)+2});
                  case '''TolFun'''
                    simpsaOptions.TOLFUN = str2double(options_list{2*(o-1)+2});
                  case '''TolX'''
540
541
542
543
544
545
                    tolx = str2double(options_list{2*(o-1)+2});
                    if tolx<0
                        simpsaOptions = rmfield(simpsaOptions,'TOLX'); % Let cmaes choose the default.
                    else
                        simpsaOptions.TOLX = tolx;
                    end
546
547
548
549
550
551
552
553
554
555
556
557
558
                  case '''EndTemparature'''
                    simpsaOptions.TEMP_END = str2double(options_list{2*(o-1)+2});
                  case '''MaxFunEvals'''
                    simpsaOptions.MAX_FUN_EVALS = str2num(options_list{2*(o-1)+2});
                  otherwise
                    warning(['simpsa: Unknown option (' options_list{2*(o-1)+1}  ')!'])
                end
                o = o + 1;
            end
        end
        simpsaOptionsList = options2cell(simpsaOptions);
        simpsaOptions = simpsaset(simpsaOptionsList{:});
        [xparam1, fval, exitflag] = simpsa(func2str(objective_function),xparam1,lb,ub,simpsaOptions,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
559
      case 11 
560
561
         options_.cova_compute = 0 ;
         [xparam1,stdh,lb_95,ub_95,med_param] = online_auxiliary_filter(xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_) ;
562
      case 101
563
564
565
566
        myoptions=soptions;
        myoptions(2)=1e-6; %accuracy of argument
        myoptions(3)=1e-6; %accuracy of function (see Solvopt p.29)
        myoptions(5)= 1.0;
567
        [xparam1,fval]=solvopt(xparam1,objective_function,[],myoptions,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
568
569
570
571
572
573
574
      case 102
        %simulating annealing
        %        LB=zeros(size(xparam1))-20;
        % UB=zeros(size(xparam1))+20;
        LB = xparam1 - 1;
        UB = xparam1 + 1;
        neps=10;
575
        %  Set input parameters.
576
        maxy=0;
577
        epsilon=1.0e-9;
578
579
580
581
582
583
584
        rt_=.10;
        t=15.0;
        ns=10;
        nt=10;
        maxevl=100000000;
        idisp =1;
        npar=length(xparam1);
585
586

        disp(['size of param',num2str(length(xparam1))])
587
588
        c=.1*ones(npar,1);
        %*  Set input values of the input/output parameters.*
589

590
591
        vm=1*ones(npar,1);
        disp(['number of parameters= ' num2str(npar) 'max= '  num2str(maxy) 't=  ' num2str(t)]);
592
        disp(['rt_=  '  num2str(rt_) 'eps=  '  num2str(epsilon) 'ns=  '  num2str(ns)]);
593
594
595
596
597
598
599
600
601
        disp(['nt=  '  num2str(nt) 'neps= '   num2str(neps) 'maxevl=  '  num2str(maxevl)]);
        %      disp(['iprint=   '   num2str(iprint) 'seed=   '   num2str(seed)]);
        disp '  ';
        disp '  ';
        disp(['starting values(x)  ' num2str(xparam1')]);
        disp(['initial step length(vm)  '  num2str(vm')]);
        disp(['lower bound(lb)', 'initial conditions', 'upper bound(ub)' ]);
        disp([LB xparam1 UB]);
        disp(['c vector   ' num2str(c')]);
602

603
604
        [xparam1, fval, nacc, nfcnev, nobds, ier, t, vm] = sa(objective_function,xparam1,maxy,rt_,epsilon,ns,nt ...
                                                              ,neps,maxevl,LB,UB,c,idisp ,t,vm,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
605
606
      otherwise
        if ischar(options_.mode_compute)
607
            [xparam1, fval, retcode ] = feval(options_.mode_compute,objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
608
        else
609
            error(['dynare_estimation:: mode_compute = ' int2str(options_.mode_compute) ' option is unknown!'])
610
611
        end
    end
612
613
    % Compute the Hessian of the posterior kernel at the mode (unless it
    % was already produced by mode_compute=6 or the user disabled it).
    if ~isequal(options_.mode_compute,6) %always already computes covariance matrix
        if options_.cova_compute == 1 %user did not request covariance not to be computed
            if options_.analytic_derivation && strcmp(func2str(objective_function),'dsge_likelihood'),
                % Analytic Hessian of the DSGE likelihood: temporarily set
                % analytic_derivation=2 so the objective returns the Hessian,
                % then restore the original setting.
                ana_deriv = options_.analytic_derivation;
                options_.analytic_derivation = 2;
                [junk1, junk2, hh] = feval(objective_function,xparam1, ...
                    dataset_,options_,M_,estim_params_,bayestopt_,oo_);
                options_.analytic_derivation = ana_deriv;
            else
                % Numerical Hessian at the mode (nx-by-nx).
                hh = reshape(hessian(objective_function,xparam1, ...
                    options_.gstep,dataset_,options_,M_,estim_params_,bayestopt_,oo_),nx,nx);
            end
        end
    end
    parameter_names = bayestopt_.name;
    % Save the mode (and the Hessian, if computed) to <fname>_mode.mat.
    if options_.cova_compute
        save([M_.fname '_mode.mat'],'xparam1','hh','parameter_names');
    else
        save([M_.fname '_mode.mat'],'xparam1','parameter_names');
    end
end

% No covariance requested: leave hh empty so downstream code can detect it.
if options_.cova_compute == 0
    hh = [];%NaN(length(xparam1),length(xparam1));
end

% Select the (inverse) proposal covariance hh for the MCMC jumping distribution.
switch options_.MCMC_jumping_covariance
    case 'hessian' %Baseline
        %do nothing and use hessian from mode_compute
    case 'prior_variance' %Use prior variance
        if any(isinf(bayestopt_.p2))
            error('Infinite prior variances detected. You cannot use the prior variances as the proposal density, if some variances are Inf.')
        else
            hh = diag(1./(bayestopt_.p2.^2));
        end
    case 'identity_matrix' %Use identity
        hh = eye(nx);
    otherwise %user specified matrix in file
        try
            load(options_.MCMC_jumping_covariance,'jumping_covariance')
            hh=jumping_covariance;
        catch
            error(['No matrix named ''jumping_covariance'' could be found in ',options_.MCMC_jumping_covariance,'.mat'])
        end
        [nrow, ncol]=size(hh);
        % Fix: must error when the matrix is not square OR has the wrong
        % dimension (the original '&&' let a square matrix of the wrong
        % size pass the check).
        if ~isequal(nrow,ncol) || ~isequal(nrow,nx) %check if square and right size
            error(['jumping_covariance matrix must be square and have ',num2str(nx),' rows and columns'])
        end
        try %check for positive definiteness
            chol(hh);
        catch
            error(['Specified jumping_covariance is not positive definite'])
        end
end

% Check that (minus) the Hessian at the mode is positive definite (via a
% Cholesky attempt); if not, warn and list parameters stuck at their bounds.
if ~options_.mh_posterior_mode_estimation && options_.cova_compute
    try
        chol(hh);
    catch
        skipline()
        disp('POSTERIOR KERNEL OPTIMIZATION PROBLEM!')
        disp(' (minus) the hessian matrix at the "mode" is not positive definite!')
        disp('=> posterior variance of the estimated parameters are not positive.')
        disp('You should try to change the initial values of the parameters using')
        disp('the estimated_params_init block, or use another optimization routine.')
        params_at_bound=find(xparam1==ub | xparam1==lb);
        if ~isempty(params_at_bound)
            for ii=1:length(params_at_bound)
                params_at_bound_name{ii,1}=get_the_name(params_at_bound(ii),0,M_,estim_params_,options_);
            end
            disp_string=[params_at_bound_name{1,:}];
            for ii=2:size(params_at_bound_name,1)
                disp_string=[disp_string,', ',params_at_bound_name{ii,:}];
            end
            fprintf('\nThe following parameters are at the prior bound: %s\n', disp_string)
            fprintf('Some potential solutions are:\n')
            fprintf('   - Check your model for mistakes.\n')
            fprintf('   - Check whether model and data are consistent (correct observation equation).\n')
            fprintf('   - Shut off prior_trunc.\n')
            fprintf('   - Use a different mode_compute like 6 or 9.\n')
            fprintf('   - Check whether the parameters estimated are identified.\n')
            fprintf('   - Increase the informativeness of the prior.\n')
        end
        warning('The results below are most likely wrong!');
    end
end

% Plot the posterior kernel around the mode; analytic derivatives are
% switched off for the duration of the call and restored afterwards.
if options_.mode_check.status == 1 && ~options_.mh_posterior_mode_estimation
    ana_deriv = options_.analytic_derivation;
    options_.analytic_derivation = 0;
    mode_check(objective_function,xparam1,hh,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
    options_.analytic_derivation = ana_deriv;
end

% Store the mode and, when available, its covariance in oo_.
oo_.posterior.optimization.mode = xparam1;
oo_.posterior.optimization.Variance = [];
if ~options_.mh_posterior_mode_estimation
    if options_.cova_compute
        % Covariance of the estimator: inverse Hessian at the mode.
        invhess = inv(hh);
        stdh = sqrt(diag(invhess));
        oo_.posterior.optimization.Variance = invhess;
    end
else
    % Mode-finding was skipped: build the proposal from the prior variances
    % (Inf variances replaced by 1, scaled by mh_posterior_mode_estimation)
    % and reset xparam1 from bayestopt_.p5, with NaN entries replaced by
    % bayestopt_.p1.
    variances = bayestopt_.p2.*bayestopt_.p2;
    idInf = isinf(variances);
    variances(idInf) = 1;
    invhess = options_.mh_posterior_mode_estimation*diag(variances);
    xparam1 = bayestopt_.p5;
    idNaN = isnan(xparam1);
    xparam1(idNaN) = bayestopt_.p1(idNaN);
end

if ~options_.cova_compute
    stdh = NaN(length(xparam1),1);
end

if any(bayestopt_.pshape > 0) && ~options_.mh_posterior_mode_estimation
    %% display results table and store parameter estimates and standard errors in results
    oo_=display_estimation_results_table(xparam1,stdh,M_,options_,estim_params_,bayestopt_,oo_,pnames,'Posterior','posterior');
    %% Laplace approximation to the marginal log density:
    if options_.cova_compute
        estim_params_nbr = size(xparam1,1);
        % Rescale before taking the determinant to limit under/overflow.
        scale_factor = -sum(log10(diag(invhess)));
        log_det_invhess = -estim_params_nbr*log(scale_factor)+log(det(scale_factor*invhess));
        likelihood = feval(objective_function,xparam1,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
        oo_.MarginalDensity.LaplaceApproximation = .5*estim_params_nbr*log(2*pi) + .5*log_det_invhess - likelihood;
        skipline()
        disp(sprintf('Log data density [Laplace approximation] is %f.',oo_.MarginalDensity.LaplaceApproximation))
        skipline()
    end
elseif ~any(bayestopt_.pshape > 0) && ~options_.mh_posterior_mode_estimation
    % No priors declared: maximum likelihood estimation.
    oo_=display_estimation_results_table(xparam1,stdh,M_,options_,estim_params_,bayestopt_,oo_,pnames,'Maximum Likelihood','mle');
end

OutputDirectoryName = CheckPath('Output',M_.dname);

% Save the indices of the estimated structural parameters.
if np > 0
    pindx = estim_params_.param_vals(:,1);
    save([M_.fname '_params.mat'],'pindx');
end

%% Metropolis-Hastings MCMC (Bayesian estimation only).
if (any(bayestopt_.pshape  >0 ) && options_.mh_replic) || ...
        (any(bayestopt_.pshape >0 ) && options_.load_mh_file)  %% not ML estimation
    % Intersect the prior bounds with the user-specified lb/ub.
    bounds = prior_bounds(bayestopt_,options_);
    bounds(:,1)=max(bounds(:,1),lb);
    bounds(:,2)=min(bounds(:,2),ub);
    bayestopt_.lb = bounds(:,1);
    bayestopt_.ub = bounds(:,2);
    outside_bound_pars=find(xparam1 < bounds(:,1) | xparam1 > bounds(:,2));
    if ~isempty(outside_bound_pars)
        for ii=1:length(outside_bound_pars)
            % Fix: look up the name of the offending parameter
            % (outside_bound_pars(ii)), not of parameter number ii.
            outside_bound_par_names{ii,1}=get_the_name(outside_bound_pars(ii),0,M_,estim_params_,options_);
        end
        disp_string=[outside_bound_par_names{1,:}];
        for ii=2:size(outside_bound_par_names,1)
            disp_string=[disp_string,', ',outside_bound_par_names{ii,:}];
        end
        error(['Mode value(s) of ', disp_string ,' are outside parameter bounds. Potentially, you should set prior_trunc=0.'])
    end
    % runs MCMC
    if options_.mh_replic
        if options_.load_mh_file && options_.use_mh_covariance_matrix
            invhess = compute_mh_covariance_matrix;
        end
        % Sampling does not support analytic derivatives; restore afterwards.
        ana_deriv = options_.analytic_derivation;
        options_.analytic_derivation = 0;
        if options_.cova_compute
            oo_.MC_record=feval(options_.posterior_sampling_method,objective_function,options_.proposal_distribution,xparam1,invhess,bounds,dataset_,options_,M_,estim_params_,bayestopt_,oo_);
        else
            error('I Cannot start the MCMC because the Hessian of the posterior kernel at the mode was not computed.')
        end
        options_.analytic_derivation = ana_deriv;
    end
    if options_.mh_posterior_mode_estimation
        CutSample(M_, options_, estim_params_);
        return
    else
        if ~options_.nodiagnostic && options_.mh_replic > 2000
            oo_= McMCDiagnostics(options_, estim_params_, M_,oo_);
        end
        %% Here i discard first half of the draws:
        CutSample(M_, options_, estim_params_);
        %% Estimation of the marginal density from the Mh draws:
        if options_.mh_replic
            [marginal,oo_] = marginal_density(M_, options_, estim_params_, ...
                                              oo_);
            % Store posterior statistics by parameter name
            oo_ = GetPosteriorParametersStatistics(estim_params_, M_, options_, bayestopt_, oo_);
            if ~options_.nograph
                oo_ = PlotPosteriorDistributions(estim_params_, M_, options_, bayestopt_, oo_);
            end
            % Store posterior mean in a vector and posterior variance in
            % a matrix
            [oo_.posterior.metropolis.mean,oo_.posterior.metropolis.Variance] ...
                = GetPosteriorMeanVariance(M_,options_.mh_drop);
        else
            load([M_.fname '_results'],'oo_');
        end
        metropolis_draw(1);
        if options_.bayesian_irf
            PosteriorIRF('posterior');
        end
        if options_.moments_varendo
            oo_ = compute_moments_varendo('posterior',options_,M_,oo_,var_list_);
        end
        if options_.smoother || ~isempty(options_.filter_step_ahead) || options_.forecast
            prior_posterior_statistics('posterior',dataset_);
        end
        % Reset the model parameters to the posterior mean.
        xparam = get_posterior_parameters('mean');
        M_ = set_all_parameters(xparam,estim_params_,M_);
    end
end

% Estimation with the particle filter ends here.
if options_.particle.status
    return
end

if (~((any(bayestopt_.pshape > 0) && options_.mh_replic) || (any(bayestopt_.pshape ...
                                                      > 0) && options_.load_mh_file)) ...
    || ~options_.smoother ) && options_.partial_information == 0  % to be fixed
    %% ML estimation, or posterior mode without metropolis-hastings or metropolis without bayesian smooth variable
    [atT,innov,measurement_error,updated_variables,ys,trend_coeff,aK,T,R,P,PK,decomp] = DsgeSmoother(xparam1,dataset_.info.ntobs,dataset_.data,dataset_.missing.aindex,dataset_.missing.state);
    oo_.Smoother.SteadyState = ys;
    oo_.Smoother.TrendCoeffs = trend_coeff;
    oo_.Smoother.Variance = P;
    i_endo = bayestopt_.smoother_saved_var_list;
    if options_.nk ~= 0
        % Store k-step-ahead filtered variables and, when available, their
        % variances and the shock decomposition.
        oo_.FilteredVariablesKStepAhead = aK(options_.filter_step_ahead, ...
                                             i_endo,:);
        if isfield(options_,'kalman_algo')
            if ~isempty(PK)
                oo_.FilteredVariablesKStepAheadVariances = ...
                    PK(options_.filter_step_ahead,i_endo,i_endo,:);
            end
            if ~isempty(decomp)
                oo_.FilteredVariablesShockDecomposition = ...
                    decomp(options_.filter_step_ahead,i_endo,:,:);
            end
        end
    end
    % Copy smoothed/filtered/updated series into oo_ fields named after
    % the corresponding endogenous variables.
    for i=bayestopt_.smoother_saved_var_list'
        i1 = dr.order_var(bayestopt_.smoother_var_list(i));
        eval(['oo_.SmoothedVariables.' deblank(M_.endo_names(i1,:)) ' = ' ...
                            'atT(i,:)'';']);
        if options_.nk > 0
            eval(['oo_.FilteredVariables.' deblank(M_.endo_names(i1,:)) ...
                  ' = squeeze(aK(1,i,:));']);
        end
        eval(['oo_.UpdatedVariables.' deblank(M_.endo_names(i1,:)) ...
              ' = updated_variables(i,:)'';']);
    end
    % Smoothed structural shocks, one field per exogenous variable.
    for i=1:M_.exo_nbr
        eval(['oo_.SmoothedShocks.' deblank(M_.exo_names(i,:)) ' = innov(i,:)'';']);
    end
    % Plot the smoothed structural shocks (and write the TeX loader file
    % when the TeX option is set).
    if ~options_.nograph
        [nbplt,nr,nc,lr,lc,nstar] = pltorg(M_.exo_nbr);
        if options_.TeX
            fidTeX = fopen([M_.fname '_SmoothedShocks.TeX'],'w');
            fprintf(fidTeX,'%% TeX eps-loader file generated by dynare_estimation.m (Dynare).\n');
            fprintf(fidTeX,['%% ' datestr(now,0) '\n']);
            fprintf(fidTeX,' \n');
        end
        for plt = 1:nbplt
            fh = dyn_figure(options_,'Name','Smoothed shocks');
            NAMES = [];
            if options_.TeX, TeXNAMES = []; end
            % Number of panels left for this page.
            nstar0=min(nstar,M_.exo_nbr-(plt-1)*nstar);
            % Use circle markers when there is a single observation.
            if gend==1
                marker_string{1,1}='-ro';
                marker_string{2,1}='-ko';
            else
                marker_string{1,1}='-r';
                marker_string{2,1}='-k';
            end
            for i=1:nstar0
                k = (plt-1)*nstar+i;
                subplot(nr,nc,i);
                plot([1 gend],[0 0],marker_string{1,1},'linewidth',.5)
                hold on
                plot(1:gend,innov(k,:),marker_string{2,1},'linewidth',1)
                hold off
                name = deblank(M_.exo_names(k,:));
                if isempty(NAMES)
                    NAMES = name;
                else
                    NAMES = char(NAMES,name);
                end
                if ~isempty(options_.XTick)
                    set(gca,'XTick',options_.XTick)
                    set(gca,'XTickLabel',options_.XTickLabel)
                end
                if gend>1
                    xlim([1 gend])
                end
                if options_.TeX
                    texname = M_.exo_names_tex(k,:);
                    if isempty(TeXNAMES)
                        TeXNAMES = ['$ ' deblank(texname) ' $'];
                    else
                        TeXNAMES = char(TeXNAMES,['$ ' deblank(texname) ' $']);
                    end
                end
                title(name,'Interpreter','none')
            end
            dyn_saveas(fh,[M_.fname '_SmoothedShocks' int2str(plt)],options_);
            if options_.TeX
                fprintf(fidTeX,'\\begin{figure}[H]\n');
                for jj = 1:nstar0
                    fprintf(fidTeX,'\\psfrag{%s}[1][][0.5][0]{%s}\n',deblank(NAMES(jj,:)),deblank(TeXNAMES(jj,:)));
                end
                fprintf(fidTeX,'\\centering \n');
                fprintf(fidTeX,'\\includegraphics[scale=0.5]{%s_SmoothedShocks%s}\n',M_.fname,int2str(plt));
                fprintf(fidTeX,'\\caption{Smoothed shocks.}');
                fprintf(fidTeX,'\\label{Fig:SmoothedShocks:%s}\n',int2str(plt));
                fprintf(fidTeX,'\\end{figure}\n');
                fprintf(fidTeX,'\n');
            end
        end
        if options_.TeX
            fprintf(fidTeX,'\n');
            fprintf(fidTeX,'%% End of TeX file.\n');
            fclose(fidTeX);
        end
    end
    %%
    %%  Smooth observational errors...
    %%
    % Fitted observables: add back the sample mean (prefilter) or the
    % (log) steady state plus deterministic trend.
    if options_.prefilter == 1
        yf = atT(bayestopt_.mf,:)+repmat(dataset_.descriptive.mean',1,gend);
    elseif options_.loglinear == 1
        yf = atT(bayestopt_.mf,:)+repmat(log(ys(bayestopt_.mfys)),1,gend)+...
             trend_coeff*[1:gend];
    else
        yf = atT(bayestopt_.mf,:)+repmat(ys(bayestopt_.mfys),1,gend)+...
             trend_coeff*[1:gend];
    end
    if nvn
        % Store the smoothed measurement errors and count the observables
        % whose estimated measurement error is not numerically zero.
        number_of_plots_to_draw = 0;
        index = [];
        for i=1:n_varobs
            % Fix: test the measurement error of observable i only (the
            % original tested the whole matrix at once).
            if max(abs(measurement_error(i,:))) > 0.000000001
                number_of_plots_to_draw = number_of_plots_to_draw + 1;
                index = cat(1,index,i);
            end
            eval(['oo_.SmoothedMeasurementErrors.' deblank(options_.varobs(i,:)) ...
                  ' = measurement_error(i,:)'';']);
        end
        if ~options_.nograph
            [nbplt,nr,nc,lr,lc,nstar] = pltorg(number_of_plots_to_draw);
            if options_.TeX
                fidTeX = fopen([M_.fname '_SmoothedObservationErrors.TeX'],'w');
                fprintf(fidTeX,'%% TeX eps-loader file generated by dynare_estimation.m (Dynare).\n');
                fprintf(fidTeX,['%% ' datestr(now,0) '\n']);
                fprintf(fidTeX,' \n');
            end
            for plt = 1:nbplt
                fh = dyn_figure(options_,'Name','Smoothed observation errors');
                NAMES = [];
                if options_.TeX, TeXNAMES = []; end
                % Fix: panels left on this page depend on plt, not on the
                % total number of pages nbplt (mirrors the smoothed-shocks
                % loop above).
                nstar0=min(nstar,number_of_plots_to_draw-(plt-1)*nstar);
                if gend==1
                    marker_string{1,1}='-ro';
                    marker_string{2,1}='-ko';
                else
                    marker_string{1,1}='-r';
                    marker_string{2,1}='-k';
                end
                for i=1:nstar0
                    k = (plt-1)*nstar+i;
                    subplot(nr,nc,i);
                    plot([1 gend],[0 0],marker_string{1,1},'linewidth',.5)
                    hold on
                    plot(1:gend,measurement_error(index(k),:),marker_string{2,1},'linewidth',1)
                    hold off