dsge_likelihood.m 29.3 KB
Newer Older
1
function [fval,DLIK,Hess,exit_flag,ys,trend_coeff,info,Model,DynareOptions,BayesInfo,DynareResults] = dsge_likelihood(xparam1,DynareDataset,DynareOptions,Model,EstimatedParameters,BayesInfo,DynareResults,derivatives_info)
2
3
4
% Evaluates the posterior kernel of a dsge model.

%@info:
Stéphane Adjemian's avatar
Stéphane Adjemian committed
5
%! @deftypefn {Function File} {[@var{fval},@var{exit_flag},@var{ys},@var{trend_coeff},@var{info},@var{Model},@var{DynareOptions},@var{BayesInfo},@var{DynareResults},@var{DLIK},@var{AHess}] =} dsge_likelihood (@var{xparam1},@var{DynareDataset},@var{DynareOptions},@var{Model},@var{EstimatedParameters},@var{BayesInfo},@var{DynareResults},@var{derivatives_flag})
6
%! @anchor{dsge_likelihood}
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
%! @sp 1
%! Evaluates the posterior kernel of a dsge model.
%! @sp 2
%! @strong{Inputs}
%! @sp 1
%! @table @ @var
%! @item xparam1
%! Vector of doubles, current values for the estimated parameters.
%! @item DynareDataset
%! Matlab's structure describing the dataset (initialized by dynare, see @ref{dataset_}).
%! @item DynareOptions
%! Matlab's structure describing the options (initialized by dynare, see @ref{options_}).
%! @item Model
%! Matlab's structure describing the Model (initialized by dynare, see @ref{M_}).
%! @item EstimatedParameters
%! Matlab's structure describing the estimated_parameters (initialized by dynare, see @ref{estim_params_}).
%! @item BayesInfo
%! Matlab's structure describing the priors (initialized by dynare, see @ref{bayesopt_}).
%! @item DynareResults
%! Matlab's structure gathering the results (initialized by dynare, see @ref{oo_}).
%! @item derivatives_flag
%! Integer scalar, flag for analytical derivatives of the likelihood.
%! @end table
%! @sp 2
%! @strong{Outputs}
%! @sp 1
%! @table @ @var
%! @item fval
%! Double scalar, value of (minus) the likelihood.
%! @item exit_flag
%! Integer scalar, equal to zero if the routine returns with a penalty (one otherwise).
%! @item ys
%! Vector of doubles, steady state level for the endogenous variables.
%! @item trend_coeffs
%! Matrix of doubles, coefficients of the deterministic trend in the measurement equation.
%! @item info
%! Integer scalar, error code.
%! @table @ @code
%! @item info==0
%! No error.
%! @item info==1
%! The model doesn't determine the current variables uniquely.
%! @item info==2
%! MJDGGES returned an error code.
%! @item info==3
%! Blanchard & Kahn conditions are not satisfied: no stable equilibrium.
%! @item info==4
%! Blanchard & Kahn conditions are not satisfied: indeterminacy.
%! @item info==5
%! Blanchard & Kahn conditions are not satisfied: indeterminacy due to rank failure.
%! @item info==6
%! The jacobian evaluated at the deterministic steady state is complex.
%! @item info==19
%! The steadystate routine threw an exception (inconsistent deep parameters).
%! @item info==20
%! Cannot find the steady state, info(2) contains the sum of square residuals (of the static equations).
%! @item info==21
%! The steady state is complex, info(2) contains the sum of square of imaginary parts of the steady state.
%! @item info==22
%! The steady state has NaNs.
%! @item info==23
%! M_.params has been updated in the steadystate routine and has complex valued scalars.
%! @item info==24
%! M_.params has been updated in the steadystate routine and has some NaNs.
%! @item info==30
%! Ergodic variance can't be computed.
%! @item info==41
%! At least one parameter is violating a lower bound condition.
%! @item info==42
%! At least one parameter is violating an upper bound condition.
%! @item info==43
%! The covariance matrix of the structural innovations is not positive definite.
%! @item info==44
%! The covariance matrix of the measurement errors is not positive definite.
%! @item info==45
%! Likelihood is not a number (NaN).
83
%! @item info==46
84
%! Likelihood is a complex valued number.
85
86
87
88
%! @item info==47
%! Posterior kernel is not a number (logged prior density is NaN)
%! @item info==48
%! Posterior kernel is a complex valued number (logged prior density is complex).
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
%! @end table
%! @item Model
%! Matlab's structure describing the model (initialized by dynare, see @ref{M_}).
%! @item DynareOptions
%! Matlab's structure describing the options (initialized by dynare, see @ref{options_}).
%! @item BayesInfo
%! Matlab's structure describing the priors (initialized by dynare, see @ref{bayesopt_}).
%! @item DynareResults
%! Matlab's structure gathering the results (initialized by dynare, see @ref{oo_}).
%! @item DLIK
%! Vector of doubles, score of the likelihood.
%! @item AHess
%! Matrix of doubles, asymptotic hessian matrix.
%! @end table
%! @sp 2
%! @strong{This function is called by:}
%! @sp 1
%! @ref{dynare_estimation_1}, @ref{mode_check}
%! @sp 2
%! @strong{This function calls:}
%! @sp 1
110
%! @ref{dynare_resolve}, @ref{lyapunov_symm}, @ref{schur_statespace_transformation}, @ref{kalman_filter_d}, @ref{missing_observations_kalman_filter_d}, @ref{univariate_kalman_filter_d}, @ref{kalman_steady_state}, @ref{getH}, @ref{kalman_filter}, @ref{score}, @ref{AHessian}, @ref{missing_observations_kalman_filter}, @ref{univariate_kalman_filter}, @ref{priordens}
111
112
%! @end deftypefn
%@eod:
113

Sébastien Villemot's avatar
Sébastien Villemot committed
114
% Copyright (C) 2004-2013 Dynare Team
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
%
% This file is part of Dynare.
%
% Dynare is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% Dynare is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with Dynare.  If not, see <http://www.gnu.org/licenses/>.

131
132
% AUTHOR(S) stephane DOT adjemian AT univ DASH lemans DOT FR

133
global objective_function_penalty_base
134

135
136
137
138
139
140
% Initialization of the returned variables and others...
% fval is left empty so that an early penalty return is detectable;
% exit_flag==1 signals a successful evaluation (set to 0 on any penalty path).
fval        = [];
ys          = [];
trend_coeff = [];
exit_flag   = 1;
info        = 0;
singularity_flag = 0;
DLIK        = [];
Hess       = [];

if DynareOptions.estimation_dll
    [fval,exit_flag,ys,trend_coeff,info,params,H,Q] ...
        = logposterior(xparam1,DynareDataset, DynareOptions,Model, ...
                          EstimatedParameters,BayesInfo,DynareResults);
149
    mexErrCheck('logposterior', exit_flag);
150
151
152
153
154
155
156
157
    Model.params = params;
    if ~isequal(Model.H,0)
        Model.H = H;
    end
    Model.Sigma_e = Q;
    DynareResults.dr.ys = ys;
    return
end
158

159
% Set flag related to analytical derivatives.
160
analytic_derivation = DynareOptions.analytic_derivation;
161
162
163
164
165

if analytic_derivation && DynareOptions.loglinear
    error('The analytic_derivation and loglinear options are not compatible')
end

166
if nargout==1,
167
    analytic_derivation=0;
168
end
169

170
171
172
173
if analytic_derivation,
    kron_flag=DynareOptions.analytic_derivation_mode;
end

174
175
176
%------------------------------------------------------------------------------
% 1. Get the structural parameters & define penalties
%------------------------------------------------------------------------------
177
178
179
180

% Return, with endogenous penalty, if some parameters are smaller than the lower bound of the prior domain.
% The penalty grows with the squared distance to the violated bound so that
% gradient-free optimizers are pushed back inside the prior support.
% mode_compute==1 (fmincon) handles bounds itself, hence the exemption.
if ~isequal(DynareOptions.mode_compute,1) && any(xparam1<BayesInfo.lb)
    k = find(xparam1<BayesInfo.lb);
    fval = objective_function_penalty_base+sum((BayesInfo.lb(k)-xparam1(k)).^2);
    exit_flag = 0;
    info = 41;
    if analytic_derivation,
        % Dummy non-zero score so callers expecting a gradient do not fail.
        DLIK=ones(length(xparam1),1);
    end
    return
end

% Return, with endogenous penalty, if some parameters are greater than the upper bound of the prior domain.
if ~isequal(DynareOptions.mode_compute,1) && any(xparam1>BayesInfo.ub)
    k = find(xparam1>BayesInfo.ub);
    fval = objective_function_penalty_base+sum((xparam1(k)-BayesInfo.ub(k)).^2);
    exit_flag = 0;
    info = 42;
    if analytic_derivation,
        DLIK=ones(length(xparam1),1);
    end
    return
end
201
202

% Get the diagonal elements of the covariance matrices for the structural innovations (Q) and the measurement error (H).
203
204
Model = set_all_parameters(xparam1,EstimatedParameters,Model);

205
206
207
Q = Model.Sigma_e;
H = Model.H;

208
% Test if Q is positive definite.
209
210
211
212
213
214
215
if ~issquare(Q) && EstimatedParameters.ncx
    [Q_is_positive_definite, penalty] = ispd(Q);
    if ~Q_is_positive_definite
        fval = objective_function_penalty_base+penalty;
        exit_flag = 0;
        info = 43;
        return
michel's avatar
michel committed
216
    end
217
end
218

219
% Test if H is positive definite.
220
221
222
223
224
225
226
if ~issquare(H) && EstimatedParameters.ncn
    [H_is_positive_definite, penalty] = ispd(H);
    if ~H_is_positive_definite
        fval = objective_function_penalty_base+penalty;
        exit_flag = 0;
        info = 44;
        return
michel's avatar
michel committed
227
    end
228
end
229
230


231
232
233
%------------------------------------------------------------------------------
% 2. call model setup & reduction program
%------------------------------------------------------------------------------
234

235
% Linearize the model around the deterministic steady state and extract the matrices of the state equation (T and R).
236
[T,R,SteadyState,info,Model,DynareOptions,DynareResults] = dynare_resolve(Model,DynareOptions,DynareResults,'restrict');
237
238

% Return, with endogenous penalty when possible, if dynare_resolve issues an error code (defined in resol).
239
if info(1) == 1 || info(1) == 2 || info(1) == 5 || info(1) == 7 || info(1) ...
240
            == 8 || info(1) == 22 || info(1) == 24 || info(1) == 19
241
    fval = objective_function_penalty_base+1;
242
243
    info = info(1);
    exit_flag = 0;
244
245
246
    if analytic_derivation,
        DLIK=ones(length(xparam1),1);
    end
247
    return
248
elseif info(1) == 3 || info(1) == 4 || info(1)==6 || info(1) == 20 || info(1) == 21  || info(1) == 23
249
    fval = objective_function_penalty_base+info(2);
250
251
    info = info(1);
    exit_flag = 0;
252
253
254
    if analytic_derivation,
        DLIK=ones(length(xparam1),1);
    end
255
256
    return
end
257

258
259
260
261
262
263
264
265
266
267
268
269
270
% check endogenous prior restrictions
info=endogenous_prior_restrictions(T,R,Model,DynareOptions,DynareResults);
if info(1),
    fval = objective_function_penalty_base+info(2);
    info = info(1);
    exit_flag = 0;
    if analytic_derivation,
        DLIK=ones(length(xparam1),1);
    end
    return
end
%

271
272
273
274
275
% Define a vector of indices for the observed variables. Is this really usefull?...
BayesInfo.mf = BayesInfo.mf1;

% Define the constant vector of the measurement equation.
if DynareOptions.noconstant
276
    constant = zeros(DynareDataset.info.nvobs,1);
277
278
279
else
    if DynareOptions.loglinear
        constant = log(SteadyState(BayesInfo.mfys));
280
    else
281
        constant = SteadyState(BayesInfo.mfys);
282
283
    end
end
284
285
286

% Define the deterministic linear trend of the measurement equation.
if BayesInfo.with_trend
287
    trend_coeff = zeros(DynareDataset.info.nvobs,1);
288
    t = DynareOptions.trend_coeffs;
289
    for i=1:length(t)
290
291
292
        if ~isempty(t{i})
            trend_coeff(i) = evalin('base',t{i});
        end
michel's avatar
michel committed
293
    end
294
    trend = repmat(constant,1,DynareDataset.info.ntobs)+trend_coeff*[1:DynareDataset.info.ntobs];
295
else
296
    trend = repmat(constant,1,DynareDataset.info.ntobs);
297
end
298
299
300
301
302
303
304
305
306
307
308
309

% Get needed informations for kalman filter routines.
start = DynareOptions.presample+1;
Z = BayesInfo.mf; % old mf
no_missing_data_flag = ~DynareDataset.missing.state;
mm = length(T); % old np
pp = DynareDataset.info.nvobs;
rr = length(Q);
kalman_tol = DynareOptions.kalman_tol;
riccati_tol = DynareOptions.riccati_tol;
Y   = DynareDataset.data-trend;

310
311
312
%------------------------------------------------------------------------------
% 3. Initial condition of the Kalman filter
%------------------------------------------------------------------------------
313
kalman_algo = DynareOptions.kalman_algo;
314
315
316
317

% resetting measurement errors covariance matrix for univariate filters
% Resetting measurement errors covariance matrix for univariate filters.
% Univariate filters (kalman_algo 2 and 4) require uncorrelated measurement
% errors; when H is non-diagonal the state vector is augmented with the
% measurement errors instead.
if (kalman_algo == 2) || (kalman_algo == 4)
    if isequal(H,0)
        % No measurement error: represent H as a zero vector of variances.
        H = zeros(pp,1);
        mmm = mm;
    else
        if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
            H = diag(H);
            mmm = mm;
        else
            % Correlated measurement errors: append them to the state vector.
            Z = [Z, eye(pp)];
            T = blkdiag(T,zeros(pp));
            Q = blkdiag(Q,H);
            R = blkdiag(R,eye(pp));
            % NOTE(review): Pstar and Pinf are only initialized in the
            % lik_init switch further below — confirm this branch cannot be
            % reached before they are defined.
            Pstar = blkdiag(Pstar,H);
            Pinf  = blkdiag(Pinf,zeros(pp)); % bug fix: was blckdiag (undefined function)
            H = zeros(pp,1);
            mmm   = mm+pp;
        end
    end
end


338
diffuse_periods = 0;
339
correlated_errors_have_been_checked = 0;
340
singular_diffuse_filter = 0;
341
342
343
344
switch DynareOptions.lik_init
  case 1% Standard initialization with the steady state of the state equation.
    if kalman_algo~=2
        % Use standard kalman filter except if the univariate filter is explicitely choosen.
345
346
        kalman_algo = 1;
    end
347
    if DynareOptions.lyapunov_fp == 1
348
349
350
351
352
        Pstar = lyapunov_symm(T,Q,DynareOptions.lyapunov_fixed_point_tol,DynareOptions.lyapunov_complex_threshold, 3, R);
    elseif DynareOptions.lyapunov_db == 1
        Pstar = disclyap_fast(T,R*Q*R',DynareOptions.lyapunov_doubling_tol);
    elseif DynareOptions.lyapunov_srs == 1
        Pstar = lyapunov_symm(T,Q,DynareOptions.lyapunov_fixed_point_tol,DynareOptions.lyapunov_complex_threshold, 4, R);
353
354
355
    else
        Pstar = lyapunov_symm(T,R*Q*R',DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
    end;
356
357
358
359
    Pinf  = [];
    a     = zeros(mm,1);
    Zflag = 0;
  case 2% Initialization with large numbers on the diagonal of the covariance matrix if the states (for non stationary models).
360
    if kalman_algo ~= 2
361
        % Use standard kalman filter except if the univariate filter is explicitely choosen.
362
363
        kalman_algo = 1;
    end
364
365
366
367
368
369
    Pstar = DynareOptions.Harvey_scale_factor*eye(mm);
    Pinf  = [];
    a     = zeros(mm,1);
    Zflag = 0;
  case 3% Diffuse Kalman filter (Durbin and Koopman)
        % Use standard kalman filter except if the univariate filter is explicitely choosen.
370
    if kalman_algo == 0
371
        kalman_algo = 3;
372
    elseif ~((kalman_algo == 3) || (kalman_algo == 4))
373
374
            error(['diffuse filter: options_.kalman_algo can only be equal ' ...
                   'to 0 (default), 3 or 4'])
375
    end
376

377
378
379
380
381
382
    [Z,T,R,QT,Pstar,Pinf] = schur_statespace_transformation(Z,T,R,Q,DynareOptions.qz_criterium);
    Zflag = 1;
    % Run diffuse kalman filter on first periods.
    if (kalman_algo==3)
        % Multivariate Diffuse Kalman Filter
        if no_missing_data_flag
383
            [dLIK,dlik,a,Pstar] = kalman_filter_d(Y, 1, size(Y,2), ...
384
385
386
387
                                                       zeros(mm,1), Pinf, Pstar, ...
                                                       kalman_tol, riccati_tol, DynareOptions.presample, ...
                                                       T,R,Q,H,Z,mm,pp,rr);
        else
388
            [dLIK,dlik,a,Pstar] = missing_observations_kalman_filter_d(DynareDataset.missing.aindex,DynareDataset.missing.number_of_observations,DynareDataset.missing.no_more_missing_observations, ...
389
390
391
392
393
                                                              Y, 1, size(Y,2), ...
                                                              zeros(mm,1), Pinf, Pstar, ...
                                                              kalman_tol, riccati_tol, DynareOptions.presample, ...
                                                              T,R,Q,H,Z,mm,pp,rr);
        end
394
        diffuse_periods = length(dlik);
395
396
        if isinf(dLIK)
            % Go to univariate diffuse filter if singularity problem.
397
            singular_diffuse_filter = 1;
398
399
        end
    end
400
    % Univariate Diffuse Kalman Filter: used either because kalman_algo==4
    % was requested, or because the multivariate diffuse filter hit a
    % singularity (singular_diffuse_filter set above).
    if singular_diffuse_filter || (kalman_algo==4)
        % The univariate filter needs uncorrelated measurement errors: pass
        % H as a vector of variances, augmenting the state if H is non-diagonal.
        if isequal(H,0)
            H1 = zeros(pp,1);
            mmm = mm;
        else
            if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
                H1 = diag(H);
                mmm = mm;
            else
                % Correlated measurement errors: append them to the state vector.
                Z = [Z, eye(pp)];
                T = blkdiag(T,zeros(pp));
                Q = blkdiag(Q,H);
                R = blkdiag(R,eye(pp));
                Pstar = blkdiag(Pstar,H);
                Pinf  = blkdiag(Pinf,zeros(pp)); % bug fix: was blckdiag (undefined function)
                H1 = zeros(pp,1);
                mmm   = mm+pp;
            end
        end
        % no need to test again for correlation elements
        correlated_errors_have_been_checked = 1;
        [dLIK,dlik,a,Pstar] = univariate_kalman_filter_d(DynareDataset.missing.aindex,...
                                                        DynareDataset.missing.number_of_observations,...
                                                        DynareDataset.missing.no_more_missing_observations, ...
                                                        Y, 1, size(Y,2), ...
                                                        zeros(mmm,1), Pinf, Pstar, ...
                                                        kalman_tol, riccati_tol, DynareOptions.presample, ...
                                                        T,R,Q,H1,Z,mmm,pp,rr);
        diffuse_periods = length(dlik);
    end
    % Penalize draws for which the diffuse likelihood is not a number.
    if isnan(dLIK),
        info = 45;
        fval = objective_function_penalty_base + 100;
        exit_flag = 0;
        return
    end
    
439
  case 4% Start from the solution of the Riccati equation.
440
    if kalman_algo ~= 2
441
442
        kalman_algo = 1;
    end
443
    if isequal(H,0)
444
        [err,Pstar] = kalman_steady_state(transpose(T),R*Q*transpose(R),transpose(build_selection_matrix(Z,mm,length(Z))));
445
    else
446
        [err,Pstar] = kalman_steady_state(transpose(T),R*Q*transpose(R),transpose(build_selection_matrix(Z,mm,length(Z))),H);
447
448
    end
    if err
449
        disp(['dsge_likelihood:: I am not able to solve the Riccati equation, so I switch to lik_init=1!']);
450
451
        DynareOptions.lik_init = 1;
        Pstar = lyapunov_symm(T,R*Q*R',DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
452
    end
453
    Pinf  = [];
454
455
    a = zeros(mm,1);
    Zflag = 0;
456
  otherwise
457
    error('dsge_likelihood:: Unknown initialization approach for the Kalman filter!')
458
end
459

460
461
462
463
464
465
if analytic_derivation,
    offset = EstimatedParameters.nvx;
    offset = offset+EstimatedParameters.nvn;
    offset = offset+EstimatedParameters.ncx;
    offset = offset+EstimatedParameters.ncn;

466
    no_DLIK = 0;
467
468
    full_Hess = analytic_derivation==2;
    asy_Hess = analytic_derivation==-2;
469
    outer_product_gradient = analytic_derivation==-1;
470
471
472
    if asy_Hess,
        analytic_derivation=1;
    end
473
474
475
    if outer_product_gradient,
        analytic_derivation=1;
    end
476
477
    DLIK = [];
    AHess = [];
478
    iv = DynareResults.dr.restrict_var_list;
479
    if nargin<8 || isempty(derivatives_info)
480
481
482
        [A,B,nou,nou,Model,DynareOptions,DynareResults] = dynare_resolve(Model,DynareOptions,DynareResults);
        if ~isempty(EstimatedParameters.var_exo)
            indexo=EstimatedParameters.var_exo(:,1);
483
484
485
        else
            indexo=[];
        end
486
487
        if ~isempty(EstimatedParameters.param_vals)
            indparam=EstimatedParameters.param_vals(:,1);
488
489
490
        else
            indparam=[];
        end
491
492

        if full_Hess,
493
494
            [dum, DT, DOm, DYss, dum2, D2T, D2Om, D2Yss] = getH(A, B, Model,DynareResults,DynareOptions,kron_flag,indparam,indexo,iv);
            clear dum dum2;
495
        else
496
            [dum, DT, DOm, DYss] = getH(A, B, Model,DynareResults,DynareOptions,kron_flag,indparam,indexo,iv);
497
        end
498
    else
499
500
501
        DT = derivatives_info.DT(iv,iv,:);
        DOm = derivatives_info.DOm(iv,iv,:);
        DYss = derivatives_info.DYss(iv,:);
502
503
504
505
506
507
508
509
510
        if isfield(derivatives_info,'full_Hess'),
            full_Hess = derivatives_info.full_Hess;
        end
        if full_Hess,
        D2T = derivatives_info.D2T;
        D2Om = derivatives_info.D2Om;
        D2Yss = derivatives_info.D2Yss;
        end
        if isfield(derivatives_info,'no_DLIK'),
511
512
            no_DLIK = derivatives_info.no_DLIK;
        end
513
        clear('derivatives_info');
514
    end
515
    DYss = [zeros(size(DYss,1),offset) DYss];
516
    DH=zeros([length(H),length(H),length(xparam1)]);
517
518
    DQ=zeros([size(Q),length(xparam1)]);
    DP=zeros([size(T),length(xparam1)]);
519
520
521
522
523
    if full_Hess,
        for j=1:size(D2Yss,1),
        tmp(j,:,:) = blkdiag(zeros(offset,offset), squeeze(D2Yss(j,:,:)));
        end
        D2Yss = tmp;
524
525
526
        D2H=sparse(size(D2Om,1),size(D2Om,2)); %zeros([size(H),length(xparam1),length(xparam1)]);
        D2P=sparse(size(D2Om,1),size(D2Om,2)); %zeros([size(T),length(xparam1),length(xparam1)]);
        jcount=0;
527
    end
528
    if DynareOptions.lik_init==1,
529
530
    for i=1:EstimatedParameters.nvx
        k =EstimatedParameters.var_exo(i,1);
531
        DQ(k,k,i) = 2*sqrt(Q(k,k));
532
        dum =  lyapunov_symm(T,DOm(:,:,i),DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
533
534
%         kk = find(abs(dum) < 1e-12);
%         dum(kk) = 0;
535
        DP(:,:,i)=dum;
536
537
        if full_Hess
        for j=1:i,
538
539
540
541
542
543
            jcount=jcount+1;
            dum =  lyapunov_symm(T,dyn_unvech(D2Om(:,jcount)),DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
%             kk = (abs(dum) < 1e-12);
%             dum(kk) = 0;
            D2P(:,jcount)=dyn_vech(dum);
%             D2P(:,:,j,i)=dum;
544
545
        end
        end
546
    end
547
    end
548
549
550
    offset = EstimatedParameters.nvx;
    for i=1:EstimatedParameters.nvn
        k = EstimatedParameters.var_endo(i,1);
551
        DH(k,k,i+offset) = 2*sqrt(H(k,k));
552
553
554
        if full_Hess
        D2H(k,k,i+offset,i+offset) = 2;
        end
555
    end
556
    offset = offset + EstimatedParameters.nvn;
557
    if DynareOptions.lik_init==1,
558
559
    for j=1:EstimatedParameters.np
        dum =  lyapunov_symm(T,DT(:,:,j+offset)*Pstar*T'+T*Pstar*DT(:,:,j+offset)'+DOm(:,:,j+offset),DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
560
561
%         kk = find(abs(dum) < 1e-12);
%         dum(kk) = 0;
562
        DP(:,:,j+offset)=dum;
563
564
565
        if full_Hess
        DTj = DT(:,:,j+offset);
        DPj = dum;
566
        for i=1:j+offset,
567
            jcount=jcount+1;
568
569
            DTi = DT(:,:,i);
            DPi = DP(:,:,i);
570
571
            D2Tij = reshape(D2T(:,jcount),size(T));
            D2Omij = dyn_unvech(D2Om(:,jcount));
572
573
            tmp = D2Tij*Pstar*T' + T*Pstar*D2Tij' + DTi*DPj*T' + DTj*DPi*T' + T*DPj*DTi' + T*DPi*DTj' + DTi*Pstar*DTj' + DTj*Pstar*DTi' + D2Omij;
            dum = lyapunov_symm(T,tmp,DynareOptions.qz_criterium,DynareOptions.lyapunov_complex_threshold);
574
575
576
%             dum(abs(dum)<1.e-12) = 0;
            D2P(:,jcount) = dyn_vech(dum);
%             D2P(:,:,j+offset,i) = dum;
577
578
        end
        end
579
    end
580
    end
581
    if analytic_derivation==1,
582
        analytic_deriv_info={analytic_derivation,DT,DYss,DOm,DH,DP,asy_Hess};
583
584
    else
        analytic_deriv_info={analytic_derivation,DT,DYss,DOm,DH,DP,D2T,D2Yss,D2Om,D2H,D2P};
585
        clear DT DYss DOm DH DP D2T D2Yss D2Om D2H D2P,
586
587
588
    end
else
    analytic_deriv_info={0};
589
590
end

591
592
593
%------------------------------------------------------------------------------
% 4. Likelihood evaluation
%------------------------------------------------------------------------------
594
595

if ((kalman_algo==1) || (kalman_algo==3))% Multivariate Kalman Filter
596
    if no_missing_data_flag
597
        if DynareOptions.block
598
            [err, LIK] = block_kalman_filter(T,R,Q,H,Pstar,Y,start,Z,kalman_tol,riccati_tol, Model.nz_state_var, Model.n_diag, Model.nobs_non_statevar);
599
            mexErrCheck('block_kalman_filter', err);
600
        else
601
            [LIK,lik] = kalman_filter(Y,diffuse_periods+1,size(Y,2), ...
Sébastien Villemot's avatar
Sébastien Villemot committed
602
603
604
                                a,Pstar, ...
                                kalman_tol, riccati_tol, ...
                                DynareOptions.presample, ...
605
                                T,Q,R,H,Z,mm,pp,rr,Zflag,diffuse_periods, ...
606
607
                                analytic_deriv_info{:});

608
        end
609
    else
610
611
612
613
614
        if 0 %DynareOptions.block
            [err, LIK,lik] = block_kalman_filter(DynareDataset.missing.aindex,DynareDataset.missing.number_of_observations,DynareDataset.missing.no_more_missing_observations,...
                                                                 T,R,Q,H,Pstar,Y,start,Z,kalman_tol,riccati_tol, Model.nz_state_var, Model.n_diag, Model.nobs_non_statevar);
        else
            [LIK,lik] = missing_observations_kalman_filter(DynareDataset.missing.aindex,DynareDataset.missing.number_of_observations,DynareDataset.missing.no_more_missing_observations,Y,diffuse_periods+1,size(Y,2), ...
615
616
617
618
                                               a, Pstar, ...
                                               kalman_tol, DynareOptions.riccati_tol, ...
                                               DynareOptions.presample, ...
                                               T,Q,R,H,Z,mm,pp,rr,Zflag,diffuse_periods);
619
        end
620
    end
621
622
623
    if analytic_derivation,
        LIK1=LIK;
        LIK=LIK1{1};
624
625
        lik1=lik;
        lik=lik1{1};
626
    end
627
    if isinf(LIK)
628
629
630
631
632
633
        if DynareOptions.use_univariate_filters_if_singularity_is_detected
            if kalman_algo == 1
                kalman_algo = 2;
            else
                kalman_algo = 4;
            end
634
        else
635
636
637
638
639
640
            if isinf(LIK)
                info = 66;
                fval = objective_function_penalty_base+1;
                exit_flag = 0;
                return
            end
641
        end
642
    else
643
644
        if DynareOptions.lik_init==3
            LIK = LIK + dLIK;
645
646
647
            if analytic_derivation==0 && nargout==2,
                lik = [dlik; lik];
            end
648
649
650
        end
    end
end
651

652
if (kalman_algo==2) || (kalman_algo==4)
    % Univariate Kalman Filter
    % resetting measurement error covariance matrix when necessary: the
    % univariate filter needs uncorrelated measurement errors, so H is
    % either passed as a vector of variances or folded into an augmented
    % state vector. Skipped when the diffuse-filter branch already did it.
    if ~correlated_errors_have_been_checked
        if isequal(H,0)
            H1 = zeros(pp,1);
            mmm = mm;
            if analytic_derivation,
                DH = zeros(pp,length(xparam1));
            end
        else
            if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
                H1 = diag(H);
                mmm = mm;
                clear tmp
                if analytic_derivation,
                    % Keep only the derivatives of the diagonal of H.
                    for j=1:pp,
                        tmp(j,:)=DH(j,j,:);
                    end
                    DH=tmp;
                end
            else
                % Correlated measurement errors: append them to the state vector.
                Z = [Z, eye(pp)];
                T = blkdiag(T,zeros(pp));
                Q = blkdiag(Q,H);
                R = blkdiag(R,eye(pp));
                Pstar = blkdiag(Pstar,H);
                Pinf  = blkdiag(Pinf,zeros(pp)); % bug fix: was blckdiag (undefined function)
                H1 = zeros(pp,1);
                mmm   = mm+pp;
            end
        end
        if analytic_derivation,
            analytic_deriv_info{5}=DH;
        end
    end

    [LIK, lik] = univariate_kalman_filter(DynareDataset.missing.aindex,DynareDataset.missing.number_of_observations,DynareDataset.missing.no_more_missing_observations,Y,diffuse_periods+1,size(Y,2), ...
                                       a,Pstar, ...
                                       DynareOptions.kalman_tol, ...
                                       DynareOptions.riccati_tol, ...
                                       DynareOptions.presample, ...
                                       T,Q,R,H1,Z,mmm,pp,rr,Zflag,diffuse_periods,analytic_deriv_info{:});
    if analytic_derivation,
        % With analytic derivatives the filter returns cell arrays:
        % element 1 is the likelihood, further elements hold derivatives.
        LIK1=LIK;
        LIK=LIK1{1};
        lik1=lik;
        lik=lik1{1};
    end
    if DynareOptions.lik_init==3
        % Add the contribution of the diffuse (initial) periods.
        LIK = LIK+dLIK;
        if analytic_derivation==0 && nargout==2,
            lik = [dlik; lik];
        end
    end
end
708

709
710
711
712
713
if analytic_derivation
    if no_DLIK==0
        DLIK = LIK1{2};
        %                 [DLIK] = score(T,R,Q,H,Pstar,Y,DT,DYss,DOm,DH,DP,start,Z,kalman_tol,riccati_tol);
    end
714
    if full_Hess ,
715
716
717
718
719
        Hess = -LIK1{3};
        %                     [Hess, DLL] = get_Hessian(T,R,Q,H,Pstar,Y,DT,DYss,DOm,DH,DP,D2T,D2Yss,D2Om,D2H,D2P,start,Z,kalman_tol,riccati_tol);
        %                     Hess0 = getHessian(Y,T,DT,D2T, R*Q*transpose(R),DOm,D2Om,Z,DYss,D2Yss);
    end
    if asy_Hess,
720
721
722
%         if ~((kalman_algo==2) || (kalman_algo==4)),
%             [Hess] = AHessian(T,R,Q,H,Pstar,Y,DT,DYss,DOm,DH,DP,start,Z,kalman_tol,riccati_tol);
%         else
723
        Hess = LIK1{3};
724
%         end
725
726
727
    end
end

728
% Return a penalty if the likelihood is not a number (info==45).
if isnan(LIK)
    info = 45;
    fval = objective_function_penalty_base + 100;
    exit_flag = 0;
    return
end

% Return a penalty if the likelihood is a complex valued number (info==46).
if imag(LIK)~=0
    info = 46;
    fval = objective_function_penalty_base + 100;
    exit_flag = 0;
    return
end

likelihood = LIK;

744
% ------------------------------------------------------------------------------
745
% 5. Adds prior if necessary
746
% ------------------------------------------------------------------------------
747
748
749
if analytic_derivation
    if full_Hess,
        [lnprior, dlnprior, d2lnprior] = priordens(xparam1,BayesInfo.pshape,BayesInfo.p6,BayesInfo.p7,BayesInfo.p3,BayesInfo.p4);
750
        Hess = Hess - d2lnprior;
751
752
753
754
755
756
    else
        [lnprior, dlnprior] = priordens(xparam1,BayesInfo.pshape,BayesInfo.p6,BayesInfo.p7,BayesInfo.p3,BayesInfo.p4);
    end
    if no_DLIK==0
        DLIK = DLIK - dlnprior';
    end
757
758
759
760
761
    if outer_product_gradient,
        dlik = lik1{2};
        dlik=[- dlnprior; dlik(start:end,:)];
        Hess = dlik'*dlik;
    end
762
763
764
else
    lnprior = priordens(xparam1,BayesInfo.pshape,BayesInfo.p6,BayesInfo.p7,BayesInfo.p3,BayesInfo.p4);
end
Johannes Pfeifer 's avatar
Johannes Pfeifer committed
765
if DynareOptions.endogenous_prior==1
766
767
768
769
770
771
  if DynareOptions.lik_init==2 || DynareOptions.lik_init==3
    error('Endogenous prior not supported with non-stationary models')
  else
    [lnpriormom]  = endogenous_prior(Y,Pstar,BayesInfo,H);
    fval    = (likelihood-lnprior-lnpriormom);
  end
Johannes Pfeifer 's avatar
Johannes Pfeifer committed
772
773
774
else
  fval    = (likelihood-lnprior);
end
775

776
777
if isnan(fval)
    info = 47;
778
    fval = objective_function_penalty_base + 100;
779
780
781
782
783
784
    exit_flag = 0;
    return
end

if imag(fval)~=0
    info = 48;
785
    fval = objective_function_penalty_base + 100;
786
787
788
789
    exit_flag = 0;
    return
end

790
791
792
% Update DynareOptions.kalman_algo.
DynareOptions.kalman_algo = kalman_algo;

793
794
795
if analytic_derivation==0 && nargout==2,
    lik=lik(start:end,:);
    DLIK=[-lnprior; lik(:)];
796
end