diff --git a/matlab/optimization/gmhmaxlik_core.m b/matlab/optimization/gmhmaxlik_core.m
index cf5fb7774b9ba676fd338cf584a1bd6b87cdace9..a35bf3387b5095193665ab373598cc7b685523ed 100644
--- a/matlab/optimization/gmhmaxlik_core.m
+++ b/matlab/optimization/gmhmaxlik_core.m
@@ -1,4 +1,5 @@
 function [PostMod,PostVar,Scale,PostMean] = gmhmaxlik_core(ObjFun,xparam1,mh_bounds,options,iScale,info,MeanPar,VarCov,varargin)
+% function [PostMod,PostVar,Scale,PostMean] = gmhmaxlik_core(ObjFun,xparam1,mh_bounds,options,iScale,info,MeanPar,VarCov,varargin)
 
 % (Dirty) Global minimization routine of (minus) a likelihood (or posterior density) function.
 %
@@ -23,7 +24,7 @@ function [PostMod,PostVar,Scale,PostMean] = gmhmaxlik_core(ObjFun,xparam1,mh_bou
 %
 % ALGORITHM
-%   Metropolis-Hastings with an constantly updated covariance matrix for
+%   Metropolis-Hastings with a constantly updated covariance matrix for
-%   the jump distribution. The posterior mean, variance and mode are
+%   the jump distribution. The posterior mean, variance, and mode are
 %   updated (in step 2) with the following rules:
 %
 %   \[
@@ -31,8 +32,14 @@ function [PostMod,PostVar,Scale,PostMean] = gmhmaxlik_core(ObjFun,xparam1,mh_bou
 %   \]
 %
 %   \[
-%       \Sigma_t = \Sigma_{t-1} + \mu_{t-1}\mu_{t-1}'-\mu_{t}\mu_{t}' +
-%                  \frac{1}{t}\left(\theta_t\theta_t'-\Sigma_{t-1}-\mu_{t-1}\mu_{t-1}'\right)
+%       \Sigma_t = \frac{SSR_t}{t-1}
+%   \]
+%
+%   with
+%
+%   \[
+%       SSR_t = SSR_{t-1} +
+%               \left(\theta_t-\mu_{t-1}\right)\left(\theta_t-\mu_{t}\right)'
 %   \]
 %
 %   and
@@ -48,15 +55,17 @@ function [PostMod,PostVar,Scale,PostMean] = gmhmaxlik_core(ObjFun,xparam1,mh_bou
 %
 %   where $t$ is the iteration, $\mu_t$ the estimate of the posterior mean
 %   after $t$ iterations, $\Sigma_t$ the estimate of the posterior
-%   covariance matrix after $t$ iterations, $\mathrm{mode}_t$ is the
+%   covariance matrix after $t$ iterations, $SSR_t$ the sum of squared deviations, $\mathrm{mode}_t$ is the
 %   evaluation of the posterior mode after $t$ iterations and
 %   $p(\theta_t|\mathcal Y)$ is the posterior density of parameters
-%   (specified by the user supplied function "fun").
+%   (specified by the user-supplied function "fun"). The covariance update
+%   follows Welford's online algorithm
+%   (https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance).
 %
 % SPECIAL REQUIREMENTS
 %   None.
 
-% Copyright © 2006-2017 Dynare Team
+% Copyright © 2006-2022 Dynare Team
 %
 % This file is part of Dynare.
 %
@@ -148,6 +157,8 @@ set(hh,'Name','Estimation of the posterior covariance...'),
 j = 1;
 isux = 0;
 ilogpo2 = - feval(ObjFun,ix2,varargin{:});
+SSR = zeros(npar,npar);  % running sum of squared deviations from the mean (Welford's algorithm)
+MeanPar = zeros(npar,1); % initial value drops out at j=1, where MeanPar is set to ix2
 while j<= NumberOfIterations
     proposal = iScale*dd*randn(npar,1) + ix2;
     if all(proposal > mh_bounds(:,1)) && all(proposal < mh_bounds(:,2))
@@ -173,10 +184,10 @@ while j<= NumberOfIterations
-    % I update the covariance matrix and the mean:
+    % I update the mean and the sum of squared deviations (used for the covariance):
     oldMeanPar = MeanPar;
     MeanPar = oldMeanPar + (1/j)*(ix2-oldMeanPar);
-    CovJump = CovJump + oldMeanPar*oldMeanPar' - MeanPar*MeanPar' + ...
-              (1/j)*(ix2*ix2' - CovJump - oldMeanPar*oldMeanPar');
+    SSR=SSR+(ix2-MeanPar)*(ix2-oldMeanPar)';
     j = j+1;
 end
+CovJump = SSR/(NumberOfIterations-1); % sample covariance of the chain states
 dyn_waitbar_close(hh);
 PostVar = CovJump;
 PostMean = MeanPar;
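
For reference, a minimal standalone sketch (not part of the patch) of the Welford-style recursion the hunks above introduce, checking that accumulating SSR and dividing by (T-1) reproduces the usual sample covariance; npar, T and draws are illustrative stand-ins for the chain states ix2:

    % Illustrative check of the online (Welford) covariance recursion against cov()
    npar = 3;
    T = 500;
    draws = randn(npar, T);                     % stand-in for the chain states ix2
    mu  = zeros(npar, 1);                       % running mean (initial value drops out at t=1)
    SSR = zeros(npar, npar);                    % running sum of squared deviations
    for t = 1:T
        x      = draws(:, t);
        mu_old = mu;
        mu     = mu_old + (x - mu_old)/t;       % recursive mean update
        SSR    = SSR + (x - mu)*(x - mu_old)';  % Welford accumulation
    end
    CovWelford = SSR/(T - 1);                   % same normalization as CovJump above
    max(max(abs(CovWelford - cov(draws'))))     % should be numerically zero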