1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Définit les objets numériques génériques.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.asmatrix(numpy.ravel( X )).T
41     __sys_path_tmp = sys.path ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     Cette classe sert d'interface pour définir les opérateurs approximés. A la
55     création d'un objet, en fournissant une fonction "Function", on obtient un
56     objet qui dispose de 3 méthodes "DirectOperator", "TangentOperator" et
57     "AdjointOperator". On contrôle l'approximation DF avec l'incrément
58     multiplicatif "increment" valant par défaut 1%, ou avec l'incrément fixe
59     "dX" qui sera multiplié par "increment" (donc en %), et on effectue de DF
60     centrées si le booléen "centeredDF" est vrai.
61     """
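    # Illustrative usage sketch (indicative only, not executed here; the toy
    # function "fmodel" below is a hypothetical stand-in for a user operator):
    #
    #     import numpy
    #     def fmodel( x ):
    #         x = numpy.ravel( x )
    #         return numpy.array([x[0]**2, x[0]*x[1]])
    #     FDA = FDApproximation( Function = fmodel, increment = 0.01, centeredDF = True )
    #     HX  = FDA.DirectOperator( numpy.array([1., 2.]) )   # direct value H(X)
    #     J   = FDA.TangentMatrix(  numpy.array([1., 2.]) )   # approximate 2x2 Jacobian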
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             avoidingRedundancy    = True,
70             toleranceInRedundancy = 1.e-18,
71             lenghtOfRedundancy    = -1,
72             mpEnabled             = False,
73             mpWorkers             = None,
74             mfEnabled             = False,
75             ):
76         self.__name = str(name)
77         self.__extraArgs = extraArguments
78         if mpEnabled:
79             try:
80                 import multiprocessing
81                 self.__mpEnabled = True
82             except ImportError:
83                 self.__mpEnabled = False
84         else:
85             self.__mpEnabled = False
86         self.__mpWorkers = mpWorkers
87         if self.__mpWorkers is not None and self.__mpWorkers < 1:
88             self.__mpWorkers = None
89         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
90         #
91         if mfEnabled:
92             self.__mfEnabled = True
93         else:
94             self.__mfEnabled = False
95         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
96         #
97         if avoidingRedundancy:
98             self.__avoidRC = True
99             self.__tolerBP = float(toleranceInRedundancy)
100             self.__lenghtRJ = int(lenghtOfRedundancy)
101             self.__listJPCP = [] # Jacobian Previous Calculated Points
102             self.__listJPCI = [] # Jacobian Previous Calculated Increment
103             self.__listJPCR = [] # Jacobian Previous Calculated Results
104             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
105             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
106         else:
107             self.__avoidRC = False
108         #
109         if self.__mpEnabled:
110             if isinstance(Function,types.FunctionType):
111                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
112                 self.__userFunction__name = Function.__name__
113                 try:
114                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
115                 except:
116                     mod = os.path.abspath(Function.__globals__['__file__'])
117                 if not os.path.isfile(mod):
118                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
119                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
120                 self.__userFunction__path = os.path.dirname(mod)
121                 del mod
122                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
123                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
124             elif isinstance(Function,types.MethodType):
125                 logging.debug("FDA Calculs en multiprocessing : MethodType")
126                 self.__userFunction__name = Function.__name__
127                 try:
128                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
129                 except:
130                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
131                 if not os.path.isfile(mod):
132                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
133                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
134                 self.__userFunction__path = os.path.dirname(mod)
135                 del mod
136                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
137                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
138             else:
139                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
140         else:
141             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142             self.__userFunction = self.__userOperator.appliedTo
143         #
144         self.__centeredDF = bool(centeredDF)
145         if abs(float(increment)) > 1.e-15:
146             self.__increment  = float(increment)
147         else:
148             self.__increment  = 0.01
149         if dX is None:
150             self.__dX     = None
151         else:
152             self.__dX     = numpy.asmatrix(numpy.ravel( dX )).T
153         logging.debug("FDA Reduction des doublons de calcul : %s"%self.__avoidRC)
154         if self.__avoidRC:
155             logging.debug("FDA Tolerance de determination des doublons : %.2e"%self.__tolerBP)
156
157     # ---------------------------------------------------------
158     def __doublon__(self, e, l, n, v=None):
159         __ac, __iac = False, -1
160         for i in range(len(l)-1,-1,-1):
161             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
162                 __ac, __iac = True, i
163                 if v is not None: logging.debug("FDA Cas%s déjà calculé, récupération du doublon %i"%(v,__iac))
164                 break
165         return __ac, __iac
166
167     # ---------------------------------------------------------
168     def DirectOperator(self, X, **extraArgs ):
169         """
170         Calcul du direct à l'aide de la fonction fournie.
171
172         NB : les extraArgs sont là pour assurer la compatibilité d'appel, mais
173         ne doivent pas être données ici à la fonction utilisateur.
174         """
175         logging.debug("FDA Calcul DirectOperator (explicite)")
176         if self.__mfEnabled:
177             _HX = self.__userFunction( X, argsAsSerie = True )
178         else:
179             _X = numpy.asmatrix(numpy.ravel( X )).T
180             _HX = numpy.ravel(self.__userFunction( _X ))
181         #
182         return _HX
183
184     # ---------------------------------------------------------
185     def TangentMatrix(self, X ):
186         """
187         Calcul de l'opérateur tangent comme la Jacobienne par différences finies,
188         c'est-à-dire le gradient de H en X. On utilise des différences finies
189         directionnelles autour du point X. X est un numpy.matrix.
190
191         Différences finies centrées (approximation d'ordre 2):
192         1/ Pour chaque composante i de X, on ajoute et on enlève la perturbation
193            dX[i] à la  composante X[i], pour composer X_plus_dXi et X_moins_dXi, et
194            on calcule les réponses HX_plus_dXi = H( X_plus_dXi ) et HX_moins_dXi =
195            H( X_moins_dXi )
196         2/ On effectue les différences (HX_plus_dXi-HX_moins_dXi) et on divise par
197            le pas 2*dXi
198         3/ Chaque résultat, par composante, devient une colonne de la Jacobienne
199
200         Différences finies non centrées (approximation d'ordre 1):
201         1/ Pour chaque composante i de X, on ajoute la perturbation dX[i] à la
202            composante X[i] pour composer X_plus_dXi, et on calcule la réponse
203            HX_plus_dXi = H( X_plus_dXi )
204         2/ On calcule la valeur centrale HX = H(X)
205         3/ On effectue les différences (HX_plus_dXi-HX) et on divise par
206            le pas dXi
207         4/ Chaque résultat, par composante, devient une colonne de la Jacobienne
208
209         """
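        # In compact form (indicative formulas for column i of the Jacobian):
        #   centered     : J[:,i] ~ ( H(X + dXi*e_i) - H(X - dXi*e_i) ) / (2*dXi)
        #   non-centered : J[:,i] ~ ( H(X + dXi*e_i) - H(X) ) / dXi
        # where e_i is the i-th canonical basis vector and dXi the i-th step size.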
210         logging.debug("FDA Début du calcul de la Jacobienne")
211         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
212         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
213         #
214         if X is None or len(X)==0:
215             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
216         #
217         _X = numpy.asmatrix(numpy.ravel( X )).T
218         #
219         if self.__dX is None:
220             _dX  = self.__increment * _X
221         else:
222             _dX = numpy.asmatrix(numpy.ravel( self.__dX )).T
223         #
224         if (_dX == 0.).any():
225             moyenne = _dX.mean()
226             if moyenne == 0.:
227                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
228             else:
229                 _dX = numpy.where( _dX == 0., moyenne, _dX )
230         #
231         __alreadyCalculated  = False
232         if self.__avoidRC:
233             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
234             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
235             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
236                 __alreadyCalculated, __i = True, __alreadyCalculatedP
237                 logging.debug("FDA Cas J déjà calculé, récupération du doublon %i"%__i)
238         #
239         if __alreadyCalculated:
240             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
241             _Jacobienne = self.__listJPCR[__i]
242         else:
243             logging.debug("FDA   Calcul Jacobienne (explicite)")
244             if self.__centeredDF:
245                 #
246                 if self.__mpEnabled and not self.__mfEnabled:
247                     funcrepr = {
248                         "__userFunction__path" : self.__userFunction__path,
249                         "__userFunction__modl" : self.__userFunction__modl,
250                         "__userFunction__name" : self.__userFunction__name,
251                     }
252                     _jobs = []
253                     for i in range( len(_dX) ):
254                         _dXi            = _dX[i]
255                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
256                         _X_plus_dXi[i]  = _X[i] + _dXi
257                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
258                         _X_moins_dXi[i] = _X[i] - _dXi
259                         #
260                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
261                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
262                     #
263                     import multiprocessing
264                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
265                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
266                     self.__pool.close()
267                     self.__pool.join()
268                     #
269                     _Jacobienne  = []
270                     for i in range( len(_dX) ):
271                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
272                     #
273                 elif self.__mfEnabled:
274                     _xserie = []
275                     for i in range( len(_dX) ):
276                         _dXi            = _dX[i]
277                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
278                         _X_plus_dXi[i]  = _X[i] + _dXi
279                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
280                         _X_moins_dXi[i] = _X[i] - _dXi
281                         #
282                         _xserie.append( _X_plus_dXi )
283                         _xserie.append( _X_moins_dXi )
284                     #
285                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
286                     #
287                     _Jacobienne  = []
288                     for i in range( len(_dX) ):
289                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
290                     #
291                 else:
292                     _Jacobienne  = []
293                     for i in range( _dX.size ):
294                         _dXi            = _dX[i]
295                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
296                         _X_plus_dXi[i]  = _X[i] + _dXi
297                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
298                         _X_moins_dXi[i] = _X[i] - _dXi
299                         #
300                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
301                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
302                         #
303                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
304                 #
305             else:
306                 #
307                 if self.__mpEnabled and not self.__mfEnabled:
308                     funcrepr = {
309                         "__userFunction__path" : self.__userFunction__path,
310                         "__userFunction__modl" : self.__userFunction__modl,
311                         "__userFunction__name" : self.__userFunction__name,
312                     }
313                     _jobs = []
314                     _jobs.append( (_X.A1, self.__extraArgs, funcrepr) )
315                     for i in range( len(_dX) ):
316                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
317                         _X_plus_dXi[i] = _X[i] + _dX[i]
318                         #
319                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
320                     #
321                     import multiprocessing
322                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
323                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
324                     self.__pool.close()
325                     self.__pool.join()
326                     #
327                     _HX = _HX_plus_dX.pop(0)
328                     #
329                     _Jacobienne = []
330                     for i in range( len(_dX) ):
331                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
332                     #
333                 elif self.__mfEnabled:
334                     _xserie = []
335                     _xserie.append( _X.A1 )
336                     for i in range( len(_dX) ):
337                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
338                         _X_plus_dXi[i] = _X[i] + _dX[i]
339                         #
340                         _xserie.append( _X_plus_dXi )
341                     #
342                     _HX_plus_dX = self.DirectOperator( _xserie )
343                     #
344                     _HX = _HX_plus_dX.pop(0)
345                     #
346                     _Jacobienne = []
347                     for i in range( len(_dX) ):
348                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
349                     #
350                 else:
351                     _Jacobienne  = []
352                     _HX = self.DirectOperator( _X )
353                     for i in range( _dX.size ):
354                         _dXi            = _dX[i]
355                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
356                         _X_plus_dXi[i]  = _X[i] + _dXi
357                         #
358                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
359                         #
360                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
361                 #
362             #
363             _Jacobienne = numpy.asmatrix( numpy.vstack( _Jacobienne ) ).T
364             if self.__avoidRC:
365                 if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
366                 while len(self.__listJPCP) > self.__lenghtRJ:
367                     self.__listJPCP.pop(0)
368                     self.__listJPCI.pop(0)
369                     self.__listJPCR.pop(0)
370                     self.__listJPPN.pop(0)
371                     self.__listJPIN.pop(0)
372                 self.__listJPCP.append( copy.copy(_X) )
373                 self.__listJPCI.append( copy.copy(_dX) )
374                 self.__listJPCR.append( copy.copy(_Jacobienne) )
375                 self.__listJPPN.append( numpy.linalg.norm(_X) )
376                 self.__listJPIN.append( numpy.linalg.norm(_Jacobienne) )
377         #
378         logging.debug("FDA Fin du calcul de la Jacobienne")
379         #
380         return _Jacobienne
381
382     # ---------------------------------------------------------
383     def TangentOperator(self, paire, **extraArgs ):
384         """
385         Calcul du tangent à l'aide de la Jacobienne.
386
387         NB : les extraArgs sont là pour assurer la compatibilité d'appel, mais
388         ne doivent pas être données ici à la fonction utilisateur.
389         """
390         if self.__mfEnabled:
391             assert len(paire) == 1, "Incorrect length of arguments"
392             _paire = paire[0]
393             assert len(_paire) == 2, "Incorrect number of arguments"
394         else:
395             assert len(paire) == 2, "Incorrect number of arguments"
396             _paire = paire
397         X, dX = _paire
398         _Jacobienne = self.TangentMatrix( X )
399         if dX is None or len(dX) == 0:
400             #
401             # Calcul de la forme matricielle si le second argument est None
402             # -------------------------------------------------------------
403             if self.__mfEnabled: return [_Jacobienne,]
404             else:                return _Jacobienne
405         else:
406             #
407             # Calcul de la valeur linéarisée de H en X appliqué à dX
408             # ------------------------------------------------------
409             _dX = numpy.asmatrix(numpy.ravel( dX )).T
410             _HtX = numpy.dot(_Jacobienne, _dX)
411             if self.__mfEnabled: return [_HtX.A1,]
412             else:                return _HtX.A1
413
414     # ---------------------------------------------------------
415     def AdjointOperator(self, paire, **extraArgs ):
416         """
417         Calcul de l'adjoint à l'aide de la Jacobienne.
418
419         NB : les extraArgs sont là pour assurer la compatibilité d'appel, mais
420         ne doivent pas être données ici à la fonction utilisateur.
421         """
422         if self.__mfEnabled:
423             assert len(paire) == 1, "Incorrect length of arguments"
424             _paire = paire[0]
425             assert len(_paire) == 2, "Incorrect number of arguments"
426         else:
427             assert len(paire) == 2, "Incorrect number of arguments"
428             _paire = paire
429         X, Y = _paire
430         _JacobienneT = self.TangentMatrix( X ).T
431         if Y is None or len(Y) == 0:
432             #
433             # Calcul de la forme matricielle si le second argument est None
434             # -------------------------------------------------------------
435             if self.__mfEnabled: return [_JacobienneT,]
436             else:                return _JacobienneT
437         else:
438             #
439             # Calcul de la valeur de l'adjoint en X appliqué à Y
440             # --------------------------------------------------
441             _Y = numpy.asmatrix(numpy.ravel( Y )).T
442             _HaY = numpy.dot(_JacobienneT, _Y)
443             if self.__mfEnabled: return [_HaY.A1,]
444             else:                return _HaY.A1
445
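# Indicative call patterns for the three FDApproximation operators (sketch,
# single-evaluation mode, i.e. with mfEnabled False):
#     FDA.TangentOperator( (X, dX) )    # returns J(X) @ dX  (linearized H at X applied to dX)
#     FDA.TangentOperator( (X, None) )  # returns the Jacobian matrix J(X)
#     FDA.AdjointOperator( (X, Y) )     # returns J(X).T @ Y (adjoint at X applied to Y)
#     FDA.AdjointOperator( (X, None) )  # returns the transposed Jacobian J(X).T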
446 # ==============================================================================
447 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
448     "Génération d'un ensemble de taille _nbmembers d'états aléatoires centrés"
449     #
450     _bgcenter = numpy.ravel(_bgcenter)[:,None]
451     if _nbmembers < 1:
452         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
453     #
454     if _bgcovariance is None:
455         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
456     else:
457         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
458         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
459     #
460     return BackgroundEnsemble
461
462 # ==============================================================================
463 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
464     "Génération d'un ensemble de taille _nbmembers d'états aléatoires centrés"
465     def __CenteredRandomAnomalies(Zr, N):
466         """
467         Génère une matrice de N anomalies aléatoires centrées sur Zr selon les
468         notes manuscrites de MB et conforme au code de PS avec eps = -1
469         """
470         eps = -1
471         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
472         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
473         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
474         Q = numpy.dot(Q,R)
475         Zr = numpy.dot(Q,Zr)
476         return Zr.T
477     #
478     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
479     if _nbmembers < 1:
480         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
481     if _bgcovariance is None:
482         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
483     else:
484         if _withSVD:
485             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
486             _nbctl = _bgcenter.size
487             if _nbmembers > _nbctl:
488                 _Z = numpy.concatenate((numpy.dot(
489                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
490                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
491             else:
492                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
493             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
494             BackgroundEnsemble = _bgcenter + _Zca
495         else:
496             if max(abs(_bgcovariance.flatten())) > 0:
497                 _nbctl = _bgcenter.size
498                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
499                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
500                 BackgroundEnsemble = _bgcenter + _Zca
501             else:
502                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
503     #
504     return BackgroundEnsemble
505
506 # ==============================================================================
507 def EnsembleMean( __Ensemble ):
508     "Renvoie la moyenne empirique d'un ensemble"
509     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
510
511 # ==============================================================================
512 def EnsembleOfAnomalies( __Ensemble, __OptMean = None, __Normalisation = 1.):
513     "Renvoie les anomalies centrées à partir d'un ensemble"
514     if __OptMean is None:
515         __Em = EnsembleMean( __Ensemble )
516     else:
517         __Em = numpy.ravel( __OptMean ).reshape((-1,1))
518     #
519     return __Normalisation * (numpy.asarray( __Ensemble ) - __Em)
520
521 # ==============================================================================
522 def EnsembleErrorCovariance( __Ensemble, __quick = False ):
523     "Renvoie l'estimation empirique de la covariance d'ensemble"
524     if __quick:
525         # Covariance rapide mais rarement définie positive
526         __Covariance = numpy.cov( __Ensemble )
527     else:
528         # Résultat souvent identique à numpy.cov, mais plus robuste
529         __n, __m = numpy.asarray( __Ensemble ).shape
530         __Anomalies = EnsembleOfAnomalies( __Ensemble )
531         # Estimation empirique
532         __Covariance = ( __Anomalies @ __Anomalies.T ) / (__m-1)
533         # Assure la symétrie
534         __Covariance = ( __Covariance + __Covariance.T ) * 0.5
535         # Assure la positivité
536         __epsilon    = mpr*numpy.trace( __Covariance )
537         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
538     #
539     return __Covariance
540
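# Indicative relation between the three ensemble utilities above (sketch, for a
# hypothetical ensemble E of shape (n, m), one member per column):
#     Xm = EnsembleMean( E )                # (n,1) empirical mean
#     A  = EnsembleOfAnomalies( E, Xm )     # E - Xm, centered anomalies
#     P  = EnsembleErrorCovariance( E )     # ~ (A @ A.T) / (m-1), symmetrized and regularized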
541 # ==============================================================================
542 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
543     "Ajout d'une perturbation à chaque membre d'un ensemble selon une covariance prescrite"
544     if hasattr(__Covariance,"assparsematrix"):
545         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
546             # Traitement d'une covariance nulle ou presque
547             return __Ensemble
548         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
549             # Traitement d'une covariance nulle ou presque
550             return __Ensemble
551     else:
552         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
553             # Traitement d'une covariance nulle ou presque
554             return __Ensemble
555         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
556             # Traitement d'une covariance nulle ou presque
557             return __Ensemble
558     #
559     __n, __m = __Ensemble.shape
560     if __Seed is not None: numpy.random.seed(__Seed)
561     #
562     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
563         # Traitement d'une covariance multiple de l'identité
564         __zero = 0.
565         __std  = numpy.sqrt(__Covariance.assparsematrix())
566         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
567     #
568     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
569         # Traitement d'une covariance diagonale avec variances non identiques
570         __zero = numpy.zeros(__n)
571         __std  = numpy.sqrt(__Covariance.assparsematrix())
572         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
573     #
574     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
575         # Traitement d'une covariance pleine
576         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
577     #
578     elif isinstance(__Covariance, numpy.ndarray):
579         # Traitement d'une covariance numpy pleine, sachant qu'on arrive ici en dernier
580         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
581     #
582     else:
583         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
584     #
585     return __Ensemble
586
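# Indicative usage sketch (plain numpy covariance case, hypothetical sizes):
#     E  = numpy.tile( numpy.array([[1.], [2.], [3.]]), 5 )   # 3 variables, 5 members
#     Q  = 0.01 * numpy.identity(3)
#     Ep = EnsemblePerturbationWithGivenCovariance( E, Q )    # adds N(0, Q) noise to each member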
587 # ==============================================================================
588 def CovarianceInflation(
589         InputCovOrEns,
590         InflationType   = None,
591         InflationFactor = None,
592         BackgroundCov   = None,
593         ):
594     """
595     Inflation applicable soit sur Pb ou Pa, soit sur les ensembles EXb ou EXa
596
597     Synthèse : Hunt 2007, section 2.3.5
598     """
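    # Indicative example (sketch): multiplicative inflation of the analysis
    # anomalies of a hypothetical ensemble Ea, with a 2% inflation factor:
    #     Ea = CovarianceInflation( Ea, "MultiplicativeOnAnalysisAnomalies", 1.02 )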
599     if InflationFactor is None:
600         return InputCovOrEns
601     else:
602         InflationFactor = float(InflationFactor)
603     #
604     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
605         if InflationFactor < 1.:
606             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
607         if InflationFactor < 1.+mpr:
608             return InputCovOrEns
609         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
610     #
611     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
612         if InflationFactor < 1.:
613             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
614         if InflationFactor < 1.+mpr:
615             return InputCovOrEns
616         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
617         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
618             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
619     #
620     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
621         if InflationFactor < 0.:
622             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
623         if InflationFactor < mpr:
624             return InputCovOrEns
625         __n, __m = numpy.asarray(InputCovOrEns).shape
626         if __n != __m:
627             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
628         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
629     #
630     elif InflationType == "HybridOnBackgroundCovariance":
631         if InflationFactor < 0.:
632             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
633         if InflationFactor < mpr:
634             return InputCovOrEns
635         __n, __m = numpy.asarray(InputCovOrEns).shape
636         if __n != __m:
637             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
638         if BackgroundCov is None:
639             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
640         if InputCovOrEns.shape != BackgroundCov.shape:
641             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
642         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
643     #
644     elif InflationType == "Relaxation":
645         raise NotImplementedError("InflationType Relaxation")
646     #
647     else:
648         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
649     #
650     return OutputCovOrEns
651
652 # ==============================================================================
653 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
654     "Estimation des quantiles a posteriori (selfA est modifié)"
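    # Principle (indicative summary of the code below): states Xr are drawn from
    # N(Xa, A); in "Linear" mode the spread is propagated through the tangent,
    # Yr = HXa + HtM @ (Xr - Xa), while in "NonLinear" mode Yr = Hm(Xr) is
    # evaluated directly; empirical quantiles of the Yr samples are then stored.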
655     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
656     #
657     # Traitement des bornes
658     if "StateBoundsForQuantiles" in selfA._parameters:
659         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Prioritaire
660     elif "Bounds" in selfA._parameters:
661         LBounds = selfA._parameters["Bounds"]  # Défaut raisonnable
662     else:
663         LBounds = None
664     if LBounds is not None:
665         LBounds = ForceNumericBounds( LBounds )
666     _Xa = numpy.ravel(Xa)
667     #
668     # Échantillonnage des états
669     YfQ  = None
670     EXr  = None
671     for i in range(nbsamples):
672         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None and HXa is not None:
673             dXr = (numpy.random.multivariate_normal(_Xa,A) - _Xa).reshape((-1,1))
674             if LBounds is not None: # "EstimateProjection" par défaut
675                 dXr = numpy.max(numpy.hstack((dXr,LBounds[:,0].reshape((-1,1))) - Xa),axis=1)
676                 dXr = numpy.min(numpy.hstack((dXr,LBounds[:,1].reshape((-1,1))) - Xa),axis=1)
677             dYr = HtM @ dXr
678             Yr = HXa.reshape((-1,1)) + dYr
679             if selfA._toStore("SampledStateForQuantiles"): Xr = _Xa + numpy.ravel(dXr)
680         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
681             Xr = numpy.random.multivariate_normal(_Xa,A)
682             if LBounds is not None: # "EstimateProjection" par défaut
683                 Xr = numpy.max(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)))),axis=1)
684                 Xr = numpy.min(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)))),axis=1)
685             Yr = Hm( Xr )
686         else:
687             raise ValueError("Quantile simulations can only be Linear or NonLinear.")
688         #
689         if YfQ is None:
690             YfQ = Yr.reshape((-1,1))
691             if selfA._toStore("SampledStateForQuantiles"): EXr = Xr.reshape((-1,1))
692         else:
693             YfQ = numpy.hstack((YfQ,Yr.reshape((-1,1))))
694             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.hstack((EXr,Xr.reshape((-1,1))))
695     #
696     # Extraction des quantiles
697     YfQ.sort(axis=-1)
698     YQ = None
699     for quantile in selfA._parameters["Quantiles"]:
700         if not (0. <= float(quantile) <= 1.): continue
701         indice = int(nbsamples * float(quantile) - 1./nbsamples)
702         if YQ is None: YQ = YfQ[:,indice].reshape((-1,1))
703         else:          YQ = numpy.hstack((YQ,YfQ[:,indice].reshape((-1,1))))
704     if YQ is not None: # Liste non vide de quantiles
705         selfA.StoredVariables["SimulationQuantiles"].store( YQ )
706     if selfA._toStore("SampledStateForQuantiles"):
707         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr )
708     #
709     return 0
710
711 # ==============================================================================
712 def ForceNumericBounds( __Bounds ):
713     "Force les bornes à être des valeurs numériques, sauf si globalement None"
714     # Conserve une valeur par défaut à None s'il n'y a pas de bornes
715     if __Bounds is None: return None
716     # Convertit toutes les bornes individuelles None à +/- l'infini
717     __Bounds = numpy.asarray( __Bounds, dtype=float )
718     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
719         raise ValueError("Incorrectly shaped bounds data")
720     __Bounds[numpy.isnan(__Bounds[:,0]),0] = -sys.float_info.max
721     __Bounds[numpy.isnan(__Bounds[:,1]),1] =  sys.float_info.max
722     return __Bounds
723
724 # ==============================================================================
725 def RecentredBounds( __Bounds, __Center):
726     "Recentre les bornes autour de 0, sauf si globalement None"
727     # Conserve une valeur par défaut à None s'il n'y a pas de bornes
728     if __Bounds is None: return None
729     # Recentre les valeurs numériques de bornes
730     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
731
732 # ==============================================================================
733 def ApplyBounds( __Vector, __Bounds, __newClip = True):
734     "Applique des bornes numériques à un point"
735     # Conserve une valeur par défaut s'il n'y a pas de bornes
736     if __Bounds is None: return __Vector
737     #
738     if not isinstance(__Vector, numpy.ndarray): # Is an array
739         raise ValueError("Incorrect array definition of vector data")
740     if not isinstance(__Bounds, numpy.ndarray): # Is an array
741         raise ValueError("Incorrect array definition of bounds data")
742     if 2*__Vector.size != __Bounds.size: # Is a 2 column array of vector length
743         raise ValueError("Incorrect bounds number to be applied for this vector")
744     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
745         raise ValueError("Incorrectly shaped bounds data")
746     #
747     if __newClip:
748         __Vector = __Vector.clip(
749             __Bounds[:,0].reshape(__Vector.shape),
750             __Bounds[:,1].reshape(__Vector.shape),
751             )
752     else:
753         __Vector = numpy.max(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,0])),axis=1)
754         __Vector = numpy.min(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,1])),axis=1)
755         __Vector = numpy.asarray(__Vector)
756     #
757     return __Vector
758
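# Indicative usage sketch of the bounds helpers (hypothetical values):
#     bounds = ForceNumericBounds( [[0., None], [None, 10.]] )    # None -> +/- max float
#     x      = ApplyBounds( numpy.array([-5., 20.]), bounds )     # -> array([ 0., 10.])
#     rb     = RecentredBounds( bounds, numpy.array([1., 1.]) )   # bounds recentered so that the given point maps to 0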
759 # ==============================================================================
760 def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
761     """
762     Constrained Extended Kalman Filter
763     """
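    # Indicative per-step scheme, in the code's own notation (sketch):
    #   forecast : Xn_predicted = M(Xn, Un),  Pn_predicted = Mt Pn Ma + Q
    #   gain     : Kn = Pn_predicted Ha (R + Ht Pn_predicted Ha)^-1
    #   analysis : Xn = Xn_predicted + Kn (Ynpu - H(Xn_predicted)),  Pn = Pn_predicted - Kn Ht Pn_predicted
    #   with Xn and Xn_predicted projected onto Bounds when ConstrainedBy is "EstimateProjection".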
764     if selfA._parameters["EstimationOf"] == "Parameters":
765         selfA._parameters["StoreInternalVariables"] = True
766     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
767     #
768     # Opérateurs
769     H = HO["Direct"].appliedControledFormTo
770     #
771     if selfA._parameters["EstimationOf"] == "State":
772         M = EM["Direct"].appliedControledFormTo
773     #
774     if CM is not None and "Tangent" in CM and U is not None:
775         Cm = CM["Tangent"].asMatrix(Xb)
776     else:
777         Cm = None
778     #
779     # Durée d'observation et tailles
780     if hasattr(Y,"stepnumber"):
781         duration = Y.stepnumber()
782         __p = numpy.cumprod(Y.shape())[-1]
783     else:
784         duration = 2
785         __p = numpy.array(Y).size
786     #
787     # Précalcul des inversions de B et R
788     if selfA._parameters["StoreInternalVariables"] \
789         or selfA._toStore("CostFunctionJ") \
790         or selfA._toStore("CostFunctionJb") \
791         or selfA._toStore("CostFunctionJo") \
792         or selfA._toStore("CurrentOptimum") \
793         or selfA._toStore("APosterioriCovariance"):
794         BI = B.getI()
795         RI = R.getI()
796     #
797     __n = Xb.size ; nbPreviousSteps = len(selfA.StoredVariables["Analysis"]) # needed below for the IndexMin computation
798     #
799     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
800         Xn = Xb
801         Pn = B
802         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
803         selfA.StoredVariables["Analysis"].store( Xb )
804         if selfA._toStore("APosterioriCovariance"):
805             if hasattr(B,"asfullmatrix"):
806                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
807             else:
808                 selfA.StoredVariables["APosterioriCovariance"].store( B )
809         selfA._setInternalState("seed", numpy.random.get_state())
810     elif selfA._parameters["nextStep"]:
811         Xn = selfA._getInternalState("Xn")
812         Pn = selfA._getInternalState("Pn")
813     #
814     if selfA._parameters["EstimationOf"] == "Parameters":
815         XaMin            = Xn
816         previousJMinimum = numpy.finfo(float).max
817     #
818     for step in range(duration-1):
819         if hasattr(Y,"store"):
820             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
821         else:
822             Ynpu = numpy.ravel( Y ).reshape((__p,1))
823         #
824         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
825         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
826         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
827         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
828         #
829         if selfA._parameters["EstimationOf"] == "State":
830             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
831             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
832             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
833             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
834         #
835         if U is not None:
836             if hasattr(U,"store") and len(U)>1:
837                 Un = numpy.ravel( U[step] ).reshape((-1,1))
838             elif hasattr(U,"store") and len(U)==1:
839                 Un = numpy.ravel( U[0] ).reshape((-1,1))
840             else:
841                 Un = numpy.ravel( U ).reshape((-1,1))
842         else:
843             Un = None
844         #
845         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
846             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
847         #
848         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
849             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
850             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
851                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
852                 Xn_predicted = Xn_predicted + Cm * Un
853             Pn_predicted = Q + Mt * Pn * Ma
854         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
855             # --- > Par principe, M = Id, Q = 0
856             Xn_predicted = Xn
857             Pn_predicted = Pn
858         #
859         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
860             Xn_predicted = ApplyBounds( Xn_predicted, selfA._parameters["Bounds"] )
861         #
862         if selfA._parameters["EstimationOf"] == "State":
863             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
864             _Innovation  = Ynpu - HX_predicted
865         elif selfA._parameters["EstimationOf"] == "Parameters":
866             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
867             _Innovation  = Ynpu - HX_predicted
868             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
869                 _Innovation = _Innovation - Cm * Un
870         #
871         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
872         Xn = Xn_predicted + Kn * _Innovation
873         Pn = Pn_predicted - Kn * Ht * Pn_predicted
874         #
875         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
876             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
877         #
878         Xa = Xn # Pointeurs
879         #--------------------------
880         selfA._setInternalState("Xn", Xn)
881         selfA._setInternalState("Pn", Pn)
882         #--------------------------
883         #
884         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
885         # ---> avec analysis
886         selfA.StoredVariables["Analysis"].store( Xa )
887         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
888             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
889         if selfA._toStore("InnovationAtCurrentAnalysis"):
890             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
891         # ---> avec current state
892         if selfA._parameters["StoreInternalVariables"] \
893             or selfA._toStore("CurrentState"):
894             selfA.StoredVariables["CurrentState"].store( Xn )
895         if selfA._toStore("ForecastState"):
896             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
897         if selfA._toStore("ForecastCovariance"):
898             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
899         if selfA._toStore("BMA"):
900             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
901         if selfA._toStore("InnovationAtCurrentState"):
902             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
903         if selfA._toStore("SimulatedObservationAtCurrentState") \
904             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
905             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
906         # ---> autres
907         if selfA._parameters["StoreInternalVariables"] \
908             or selfA._toStore("CostFunctionJ") \
909             or selfA._toStore("CostFunctionJb") \
910             or selfA._toStore("CostFunctionJo") \
911             or selfA._toStore("CurrentOptimum") \
912             or selfA._toStore("APosterioriCovariance"):
913             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
914             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
915             J   = Jb + Jo
916             selfA.StoredVariables["CostFunctionJb"].store( Jb )
917             selfA.StoredVariables["CostFunctionJo"].store( Jo )
918             selfA.StoredVariables["CostFunctionJ" ].store( J )
919             #
920             if selfA._toStore("IndexOfOptimum") \
921                 or selfA._toStore("CurrentOptimum") \
922                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
923                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
924                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
925                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
926                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
927             if selfA._toStore("IndexOfOptimum"):
928                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
929             if selfA._toStore("CurrentOptimum"):
930                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
931             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
932                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
933             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
934                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
935             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
936                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
937             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
938                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
939         if selfA._toStore("APosterioriCovariance"):
940             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
941         if selfA._parameters["EstimationOf"] == "Parameters" \
942             and J < previousJMinimum:
943             previousJMinimum    = J
944             XaMin               = Xa
945             if selfA._toStore("APosterioriCovariance"):
946                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
947     #
948     # Stockage final supplémentaire de l'optimum en estimation de paramètres
949     # ----------------------------------------------------------------------
950     if selfA._parameters["EstimationOf"] == "Parameters":
951         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
952         selfA.StoredVariables["Analysis"].store( XaMin )
953         if selfA._toStore("APosterioriCovariance"):
954             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
955         if selfA._toStore("BMA"):
956             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
957     #
958     return 0
959
960 # ==============================================================================
961 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
962     """
963     EnKS
964     """
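    # Indicative scheme (sketch): a fixed-lag ensemble Kalman smoother of lag
    # LagL; at each step the ETKF analysis weights wTU computed from the current
    # observation are also re-applied to the anomalies of the LagL previous
    # ensemble states, so that past analyses are smoothed retrospectively.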
965     #
966     # Opérateurs
967     H = HO["Direct"].appliedControledFormTo
968     #
969     if selfA._parameters["EstimationOf"] == "State":
970         M = EM["Direct"].appliedControledFormTo
971     #
972     if CM is not None and "Tangent" in CM and U is not None:
973         Cm = CM["Tangent"].asMatrix(Xb)
974     else:
975         Cm = None
976     #
977     # Précalcul des inversions de B et R
978     RIdemi = R.sqrtmI()
979     #
980     # Durée d'observation et tailles
981     LagL = selfA._parameters["SmootherLagL"]
982     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
983         raise ValueError("Fixed-lag smoother requires a series of observations")
984     if Y.stepnumber() < LagL:
985         raise ValueError("Fixed-lag smoother requires a series of observations at least as long as the lag L")
986     duration = Y.stepnumber()
987     __p = numpy.cumprod(Y.shape())[-1]
988     __n = Xb.size
989     __m = selfA._parameters["NumberOfMembers"]
990     #
991     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
992         selfA.StoredVariables["Analysis"].store( Xb )
993         if selfA._toStore("APosterioriCovariance"):
994             if hasattr(B,"asfullmatrix"):
995                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
996             else:
997                 selfA.StoredVariables["APosterioriCovariance"].store( B )
998     #
999     # Calcul direct initial (on privilégie la mémorisation au recalcul)
1000     __seed = numpy.random.get_state()
1001     selfB = copy.deepcopy(selfA)
1002     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
1003     if VariantM == "EnKS16-KalmanFilterFormula":
1004         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
1005     else:
1006         raise ValueError("VariantM has to be chosen in the authorized methods list.")
1007     if LagL > 0:
1008         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
1009     else:
1010         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
1011     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
1012     #
1013     for step in range(LagL,duration-1):
1014         #
1015         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
1016         sEL.append(None)
1017         #
1018         if hasattr(Y,"store"):
1019             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1020         else:
1021             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1022         #
1023         if U is not None:
1024             if hasattr(U,"store") and len(U)>1:
1025                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1026             elif hasattr(U,"store") and len(U)==1:
1027                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1028             else:
1029                 Un = numpy.asmatrix(numpy.ravel( U )).T
1030         else:
1031             Un = None
1032         #
1033         #--------------------------
1034         if VariantM == "EnKS16-KalmanFilterFormula":
1035             if selfA._parameters["EstimationOf"] == "State": # Forecast
1036                 EL = M( [(EL[:,i], Un) for i in range(__m)],
1037                     argsAsSerie = True,
1038                     returnSerieAsArrayMatrix = True )
1039                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
1040                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1041                     argsAsSerie = True,
1042                     returnSerieAsArrayMatrix = True )
1043                 if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
1044                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1045                     EZ = EZ + Cm * Un
1046             elif selfA._parameters["EstimationOf"] == "Parameters":
1047                 # --- > Par principe, M = Id, Q = 0
1048                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1049                     argsAsSerie = True,
1050                     returnSerieAsArrayMatrix = True )
1051             #
1052             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1053             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1054             #
1055             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
1056             mS    = mS.reshape((-1,__m)) # Pour dimension 1
1057             delta = RIdemi @ ( Ynpu - vZm )
1058             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1059             vw    = mT @ mS.T @ delta
1060             #
1061             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1062             mU    = numpy.identity(__m)
1063             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
1064             #
1065             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
1066             EL    = vEm + EX @ wTU
1067             #
1068             sEL[LagL] = EL
1069             for irl in range(LagL): # Lissage des L précédentes analysis
1070                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1071                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
1072                 sEL[irl] = vEm + EX @ wTU
1073             #
1074             # Conservation de l'analyse retrospective d'ordre 0 avant rotation
1075             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1076             if selfA._toStore("APosterioriCovariance"):
1077                 EXn = sEL[0]
1078             #
1079             for irl in range(LagL):
1080                 sEL[irl] = sEL[irl+1]
1081             sEL[LagL] = None
1082         #--------------------------
1083         else:
1084             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1085         #
1086         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1087         # ---> with analysis
1088         selfA.StoredVariables["Analysis"].store( Xa )
1089         if selfA._toStore("APosterioriCovariance"):
1090             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
1091     #
1092     # Storage of the last analyses, which are not yet fully updated
1093     for irl in range(LagL):
1094         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1095         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1096         selfA.StoredVariables["Analysis"].store( Xa )
1097     #
1098     return 0
1099
1100 # ==============================================================================
1101 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
1102     """
1103     Ensemble-Transform EnKF
1104     """
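    # Reading note: all the variants below work in ensemble-weight space. Writing Xf' and
    # HXf' for the forecast and observed-forecast anomalies normalized by sqrt(m-1), and
    # S = R^(-1/2) HXf', the reference "KalmanFilterFormula" variant uses the closed form
    #   T = (I + S.T S)^(-1),  w = T S.T R^(-1/2) (y - mean(HXf)),
    #   Xa = mean(Xf) + Xf' (w + sqrt(m-1) T^(1/2))
    # while the "Variational" and "FiniteSize*" variants obtain w by minimizing a cost
    # function in w (exact, or corrected for ensemble sampling error) instead.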
1105     if selfA._parameters["EstimationOf"] == "Parameters":
1106         selfA._parameters["StoreInternalVariables"] = True
1107     #
1108     # Operators
1109     H = HO["Direct"].appliedControledFormTo
1110     #
1111     if selfA._parameters["EstimationOf"] == "State":
1112         M = EM["Direct"].appliedControledFormTo
1113     #
1114     if CM is not None and "Tangent" in CM and U is not None:
1115         Cm = CM["Tangent"].asMatrix(Xb)
1116     else:
1117         Cm = None
1118     #
1119     # Observation duration and sizes
1120     if hasattr(Y,"stepnumber"):
1121         duration = Y.stepnumber()
1122         __p = numpy.cumprod(Y.shape())[-1]
1123     else:
1124         duration = 2
1125         __p = numpy.array(Y).size
1126     #
1127     # Precompute the inverses of B and R
1128     if selfA._parameters["StoreInternalVariables"] \
1129         or selfA._toStore("CostFunctionJ") \
1130         or selfA._toStore("CostFunctionJb") \
1131         or selfA._toStore("CostFunctionJo") \
1132         or selfA._toStore("CurrentOptimum") \
1133         or selfA._toStore("APosterioriCovariance"):
1134         BI = B.getI()
1135         RI = R.getI()
1136     elif VariantM != "KalmanFilterFormula":
1137         RI = R.getI()
1138     if VariantM == "KalmanFilterFormula":
1139         RIdemi = R.sqrtmI()
1140     #
1141     __n = Xb.size
1142     __m = selfA._parameters["NumberOfMembers"]
1143     #
1144     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1145         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1146         selfA.StoredVariables["Analysis"].store( Xb )
1147         if selfA._toStore("APosterioriCovariance"):
1148             if hasattr(B,"asfullmatrix"):
1149                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1150             else:
1151                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1152         selfA._setInternalState("seed", numpy.random.get_state())
1153     elif selfA._parameters["nextStep"]:
1154         Xn = selfA._getInternalState("Xn")
1155     #
1156     previousJMinimum = numpy.finfo(float).max
1157     #
1158     for step in range(duration-1):
1159         numpy.random.set_state(selfA._getInternalState("seed"))
1160         if hasattr(Y,"store"):
1161             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1162         else:
1163             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1164         #
1165         if U is not None:
1166             if hasattr(U,"store") and len(U)>1:
1167                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1168             elif hasattr(U,"store") and len(U)==1:
1169                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1170             else:
1171                 Un = numpy.asmatrix(numpy.ravel( U )).T
1172         else:
1173             Un = None
1174         #
1175         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1176             Xn = CovarianceInflation( Xn,
1177                 selfA._parameters["InflationType"],
1178                 selfA._parameters["InflationFactor"],
1179                 )
1180         #
1181         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1182             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1183                 argsAsSerie = True,
1184                 returnSerieAsArrayMatrix = True )
1185             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1186             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1187                 argsAsSerie = True,
1188                 returnSerieAsArrayMatrix = True )
1189             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is applied twice!
1190                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1191                 Xn_predicted = Xn_predicted + Cm * Un
1192         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1193             # --- > By principle, M = Id, Q = 0
1194             Xn_predicted = EMX = Xn
1195             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1196                 argsAsSerie = True,
1197                 returnSerieAsArrayMatrix = True )
1198         #
1199         # Mean of forecast and observation of forecast
1200         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1201         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1202         #
1203         # Anomalies
1204         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
1205         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
1206         #
1207         #--------------------------
1208         if VariantM == "KalmanFilterFormula":
1209             mS    = RIdemi * EaHX / math.sqrt(__m-1)
1210             mS    = mS.reshape((-1,__m)) # For the 1D case
1211             delta = RIdemi * ( Ynpu - Hfm )
1212             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1213             vw    = mT @ mS.T @ delta
1214             #
1215             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1216             mU    = numpy.identity(__m)
1217             #
1218             EaX   = EaX / math.sqrt(__m-1)
1219             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
1220         #--------------------------
1221         elif VariantM == "Variational":
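            # Reading note: here the weights vw minimize, by conjugate gradient, the
            # ensemble-space cost J(w) = 0.5*(d - EaHX w).T R^(-1) (d - EaHX w) + 0.5*(m-1)*w.T w
            # with d = Ynpu - H(Xfm). The analysis anomalies are then rescaled by
            # EWa = ((m-1)*Hta^(-1))^(1/2), the matrix square root of the inverse Hessian of J.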
1222             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1223             def CostFunction(w):
1224                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1225                 _Jo = 0.5 * _A.T @ (RI * _A)
1226                 _Jb = 0.5 * (__m-1) * w.T @ w
1227                 _J  = _Jo + _Jb
1228                 return float(_J)
1229             def GradientOfCostFunction(w):
1230                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1231                 _GradJo = - EaHX.T @ (RI * _A)
1232                 _GradJb = (__m-1) * w.reshape((__m,1))
1233                 _GradJ  = _GradJo + _GradJb
1234                 return numpy.ravel(_GradJ)
1235             vw = scipy.optimize.fmin_cg(
1236                 f           = CostFunction,
1237                 x0          = numpy.zeros(__m),
1238                 fprime      = GradientOfCostFunction,
1239                 args        = (),
1240                 disp        = False,
1241                 )
1242             #
1243             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1244             Htb = (__m-1) * numpy.identity(__m)
1245             Hta = Hto + Htb
1246             #
1247             Pta = numpy.linalg.inv( Hta )
1248             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1249             #
1250             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1251         #--------------------------
1252         elif VariantM == "FiniteSize11": # Boc2011 gauge
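            # Reading note: the "FiniteSize*" variants are of the finite-size (EnKF-N) type.
            # Compared with the "Variational" cost above, the Gaussian prior term 0.5*(m-1)*w.T w
            # is replaced by a logarithmic one (here 0.5*m*log(1 + 1/m + w.T w)), intended to
            # account for the sampling error of the ensemble mean and covariance, with the three
            # gauge choices labelled Boc2011, Boc2015 and Boc2016 in the comments.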
1253             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1254             def CostFunction(w):
1255                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1256                 _Jo = 0.5 * _A.T @ (RI * _A)
1257                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1258                 _J  = _Jo + _Jb
1259                 return float(_J)
1260             def GradientOfCostFunction(w):
1261                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1262                 _GradJo = - EaHX.T @ (RI * _A)
1263                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1264                 _GradJ  = _GradJo + _GradJb
1265                 return numpy.ravel(_GradJ)
1266             vw = scipy.optimize.fmin_cg(
1267                 f           = CostFunction,
1268                 x0          = numpy.zeros(__m),
1269                 fprime      = GradientOfCostFunction,
1270                 args        = (),
1271                 disp        = False,
1272                 )
1273             #
1274             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1275             Htb = __m * \
1276                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1277                 / (1 + 1/__m + vw.T @ vw)**2
1278             Hta = Hto + Htb
1279             #
1280             Pta = numpy.linalg.inv( Hta )
1281             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1282             #
1283             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1284         #--------------------------
1285         elif VariantM == "FiniteSize15": # Boc2015 gauge
1286             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1287             def CostFunction(w):
1288                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1289                 _Jo = 0.5 * _A.T @ (RI * _A)
1290                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1291                 _J  = _Jo + _Jb
1292                 return float(_J)
1293             def GradientOfCostFunction(w):
1294                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1295                 _GradJo = - EaHX.T @ (RI * _A)
1296                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1297                 _GradJ  = _GradJo + _GradJb
1298                 return numpy.ravel(_GradJ)
1299             vw = scipy.optimize.fmin_cg(
1300                 f           = CostFunction,
1301                 x0          = numpy.zeros(__m),
1302                 fprime      = GradientOfCostFunction,
1303                 args        = (),
1304                 disp        = False,
1305                 )
1306             #
1307             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1308             Htb = (__m+1) * \
1309                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1310                 / (1 + 1/__m + vw.T @ vw)**2
1311             Hta = Hto + Htb
1312             #
1313             Pta = numpy.linalg.inv( Hta )
1314             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1315             #
1316             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1317         #--------------------------
1318         elif VariantM == "FiniteSize16": # Boc2016 gauge
1319             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1320             def CostFunction(w):
1321                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1322                 _Jo = 0.5 * _A.T @ (RI * _A)
1323                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1324                 _J  = _Jo + _Jb
1325                 return float(_J)
1326             def GradientOfCostFunction(w):
1327                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1328                 _GradJo = - EaHX.T @ (RI * _A)
1329                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1330                 _GradJ  = _GradJo + _GradJb
1331                 return numpy.ravel(_GradJ)
1332             vw = scipy.optimize.fmin_cg(
1333                 f           = CostFunction,
1334                 x0          = numpy.zeros(__m),
1335                 fprime      = GradientOfCostFunction,
1336                 args        = (),
1337                 disp        = False,
1338                 )
1339             #
1340             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1341             Htb = ((__m+1) / (__m-1)) * \
1342                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1343                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1344             Hta = Hto + Htb
1345             #
1346             Pta = numpy.linalg.inv( Hta )
1347             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1348             #
1349             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1350         #--------------------------
1351         else:
1352             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1353         #
1354         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1355             Xn = CovarianceInflation( Xn,
1356                 selfA._parameters["InflationType"],
1357                 selfA._parameters["InflationFactor"],
1358                 )
1359         #
1360         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1361         #--------------------------
1362         selfA._setInternalState("Xn", Xn)
1363         selfA._setInternalState("seed", numpy.random.get_state())
1364         #--------------------------
1365         #
1366         if selfA._parameters["StoreInternalVariables"] \
1367             or selfA._toStore("CostFunctionJ") \
1368             or selfA._toStore("CostFunctionJb") \
1369             or selfA._toStore("CostFunctionJo") \
1370             or selfA._toStore("APosterioriCovariance") \
1371             or selfA._toStore("InnovationAtCurrentAnalysis") \
1372             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1373             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1374             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1375             _Innovation = Ynpu - _HXa
1376         #
1377         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1378         # ---> with analysis
1379         selfA.StoredVariables["Analysis"].store( Xa )
1380         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1381             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1382         if selfA._toStore("InnovationAtCurrentAnalysis"):
1383             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1384         # ---> with current state
1385         if selfA._parameters["StoreInternalVariables"] \
1386             or selfA._toStore("CurrentState"):
1387             selfA.StoredVariables["CurrentState"].store( Xn )
1388         if selfA._toStore("ForecastState"):
1389             selfA.StoredVariables["ForecastState"].store( EMX )
1390         if selfA._toStore("ForecastCovariance"):
1391             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1392         if selfA._toStore("BMA"):
1393             selfA.StoredVariables["BMA"].store( EMX - Xa )
1394         if selfA._toStore("InnovationAtCurrentState"):
1395             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1396         if selfA._toStore("SimulatedObservationAtCurrentState") \
1397             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1398             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1399         # ---> others
1400         if selfA._parameters["StoreInternalVariables"] \
1401             or selfA._toStore("CostFunctionJ") \
1402             or selfA._toStore("CostFunctionJb") \
1403             or selfA._toStore("CostFunctionJo") \
1404             or selfA._toStore("CurrentOptimum") \
1405             or selfA._toStore("APosterioriCovariance"):
1406             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1407             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1408             J   = Jb + Jo
1409             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1410             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1411             selfA.StoredVariables["CostFunctionJ" ].store( J )
1412             #
1413             if selfA._toStore("IndexOfOptimum") \
1414                 or selfA._toStore("CurrentOptimum") \
1415                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1416                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1417                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1418                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1419                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1420             if selfA._toStore("IndexOfOptimum"):
1421                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1422             if selfA._toStore("CurrentOptimum"):
1423                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1424             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1425                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1426             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1427                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1428             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1429                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1430             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1431                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1432         if selfA._toStore("APosterioriCovariance"):
1433             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1434         if selfA._parameters["EstimationOf"] == "Parameters" \
1435             and J < previousJMinimum:
1436             previousJMinimum    = J
1437             XaMin               = Xa
1438             if selfA._toStore("APosterioriCovariance"):
1439                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1440         # ---> For the smoothers
1441         if selfA._toStore("CurrentEnsembleState"):
1442             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1443     #
1444     # Additional final storage of the optimum in parameter estimation
1445     # ----------------------------------------------------------------------
1446     if selfA._parameters["EstimationOf"] == "Parameters":
1447         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1448         selfA.StoredVariables["Analysis"].store( XaMin )
1449         if selfA._toStore("APosterioriCovariance"):
1450             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1451         if selfA._toStore("BMA"):
1452             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1453     #
1454     return 0
1455
1456 # ==============================================================================
1457 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1458     """
1459     Extended Kalman Filter
1460     """
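    # Reading note: this is the classical EKF recursion, with Mt/Ma and Ht/Ha the tangent
    # and adjoint matrices of the evolution and observation operators at the current state:
    #   forecast : Xf = M(Xn, Un),             Pf = Mt Pn Ma + Q
    #   gain     : K  = Pf Ha (R + Ht Pf Ha)^(-1)
    #   analysis : Xa = Xf + K (y - H(Xf)),    Pa = Pf - K Ht Pf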
1461     if selfA._parameters["EstimationOf"] == "Parameters":
1462         selfA._parameters["StoreInternalVariables"] = True
1463     #
1464     # Operators
1465     H = HO["Direct"].appliedControledFormTo
1466     #
1467     if selfA._parameters["EstimationOf"] == "State":
1468         M = EM["Direct"].appliedControledFormTo
1469     #
1470     if CM is not None and "Tangent" in CM and U is not None:
1471         Cm = CM["Tangent"].asMatrix(Xb)
1472     else:
1473         Cm = None
1474     #
1475     # Observation duration and sizes
1476     if hasattr(Y,"stepnumber"):
1477         duration = Y.stepnumber()
1478         __p = numpy.cumprod(Y.shape())[-1]
1479     else:
1480         duration = 2
1481         __p = numpy.array(Y).size
1482     #
1483     # Precompute the inverses of B and R
1484     if selfA._parameters["StoreInternalVariables"] \
1485         or selfA._toStore("CostFunctionJ") \
1486         or selfA._toStore("CostFunctionJb") \
1487         or selfA._toStore("CostFunctionJo") \
1488         or selfA._toStore("CurrentOptimum") \
1489         or selfA._toStore("APosterioriCovariance"):
1490         BI = B.getI()
1491         RI = R.getI()
1492     #
1493     __n = Xb.size
1494     #
1495     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1496         Xn = Xb
1497         Pn = B
1498         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1499         selfA.StoredVariables["Analysis"].store( Xb )
1500         if selfA._toStore("APosterioriCovariance"):
1501             if hasattr(B,"asfullmatrix"):
1502                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1503             else:
1504                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1505         selfA._setInternalState("seed", numpy.random.get_state())
1506     elif selfA._parameters["nextStep"]:
1507         Xn = selfA._getInternalState("Xn")
1508         Pn = selfA._getInternalState("Pn")
1509     #
1510     if selfA._parameters["EstimationOf"] == "Parameters":
1511         XaMin            = Xn
1512         previousJMinimum = numpy.finfo(float).max
1513     #
1514     for step in range(duration-1):
1515         if hasattr(Y,"store"):
1516             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1517         else:
1518             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1519         #
1520         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1521         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1522         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1523         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1524         #
1525         if selfA._parameters["EstimationOf"] == "State":
1526             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1527             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1528             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1529             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1530         #
1531         if U is not None:
1532             if hasattr(U,"store") and len(U)>1:
1533                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1534             elif hasattr(U,"store") and len(U)==1:
1535                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1536             else:
1537                 Un = numpy.ravel( U ).reshape((-1,1))
1538         else:
1539             Un = None
1540         #
1541         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1542             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1543             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is applied twice!
1544                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1545                 Xn_predicted = Xn_predicted + Cm * Un
1546             Pn_predicted = Q + Mt * Pn * Ma
1547         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1548             # --- > By principle, M = Id, Q = 0
1549             Xn_predicted = Xn
1550             Pn_predicted = Pn
1551         #
1552         if selfA._parameters["EstimationOf"] == "State":
1553             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1554             _Innovation  = Ynpu - HX_predicted
1555         elif selfA._parameters["EstimationOf"] == "Parameters":
1556             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1557             _Innovation  = Ynpu - HX_predicted
1558             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is applied twice!
1559                 _Innovation = _Innovation - Cm * Un
1560         #
1561         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1562         Xn = Xn_predicted + Kn * _Innovation
1563         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1564         #
1565         Xa = Xn # Pointers
1566         #--------------------------
1567         selfA._setInternalState("Xn", Xn)
1568         selfA._setInternalState("Pn", Pn)
1569         #--------------------------
1570         #
1571         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1572         # ---> with analysis
1573         selfA.StoredVariables["Analysis"].store( Xa )
1574         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1575             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1576         if selfA._toStore("InnovationAtCurrentAnalysis"):
1577             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1578         # ---> with current state
1579         if selfA._parameters["StoreInternalVariables"] \
1580             or selfA._toStore("CurrentState"):
1581             selfA.StoredVariables["CurrentState"].store( Xn )
1582         if selfA._toStore("ForecastState"):
1583             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1584         if selfA._toStore("ForecastCovariance"):
1585             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1586         if selfA._toStore("BMA"):
1587             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1588         if selfA._toStore("InnovationAtCurrentState"):
1589             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1590         if selfA._toStore("SimulatedObservationAtCurrentState") \
1591             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1592             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1593         # ---> others
1594         if selfA._parameters["StoreInternalVariables"] \
1595             or selfA._toStore("CostFunctionJ") \
1596             or selfA._toStore("CostFunctionJb") \
1597             or selfA._toStore("CostFunctionJo") \
1598             or selfA._toStore("CurrentOptimum") \
1599             or selfA._toStore("APosterioriCovariance"):
1600             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1601             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1602             J   = Jb + Jo
1603             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1604             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1605             selfA.StoredVariables["CostFunctionJ" ].store( J )
1606             #
1607             if selfA._toStore("IndexOfOptimum") \
1608                 or selfA._toStore("CurrentOptimum") \
1609                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1610                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1611                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1612                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1613                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1614             if selfA._toStore("IndexOfOptimum"):
1615                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1616             if selfA._toStore("CurrentOptimum"):
1617                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1618             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1619                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1620             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1621                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1622             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1623                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1624             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1625                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1626         if selfA._toStore("APosterioriCovariance"):
1627             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1628         if selfA._parameters["EstimationOf"] == "Parameters" \
1629             and J < previousJMinimum:
1630             previousJMinimum    = J
1631             XaMin               = Xa
1632             if selfA._toStore("APosterioriCovariance"):
1633                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1634     #
1635     # Additional final storage of the optimum in parameter estimation
1636     # ----------------------------------------------------------------------
1637     if selfA._parameters["EstimationOf"] == "Parameters":
1638         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1639         selfA.StoredVariables["Analysis"].store( XaMin )
1640         if selfA._toStore("APosterioriCovariance"):
1641             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1642         if selfA._toStore("BMA"):
1643             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1644     #
1645     return 0
1646
1647 # ==============================================================================
1648 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1649     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1650     """
1651     Iterative EnKF
1652     """
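    # Reading note: in the "IEnKF12" variant below, each analysis is obtained by Gauss-Newton
    # iterations on the ensemble weights vw, every iteration re-propagating the perturbed
    # ensemble through M and H. The BnotT flag selects the "bundle" version (finite
    # differences of size _epsilon) instead of the "transform" version (matrix Ta), and the
    # iterations stop when the weight update falls below _e or after _jmax loops.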
1653     if selfA._parameters["EstimationOf"] == "Parameters":
1654         selfA._parameters["StoreInternalVariables"] = True
1655     #
1656     # Operators
1657     H = HO["Direct"].appliedControledFormTo
1658     #
1659     if selfA._parameters["EstimationOf"] == "State":
1660         M = EM["Direct"].appliedControledFormTo
1661     #
1662     if CM is not None and "Tangent" in CM and U is not None:
1663         Cm = CM["Tangent"].asMatrix(Xb)
1664     else:
1665         Cm = None
1666     #
1667     # Observation duration and sizes
1668     if hasattr(Y,"stepnumber"):
1669         duration = Y.stepnumber()
1670         __p = numpy.cumprod(Y.shape())[-1]
1671     else:
1672         duration = 2
1673         __p = numpy.array(Y).size
1674     #
1675     # Precompute the inverses of B and R
1676     if selfA._parameters["StoreInternalVariables"] \
1677         or selfA._toStore("CostFunctionJ") \
1678         or selfA._toStore("CostFunctionJb") \
1679         or selfA._toStore("CostFunctionJo") \
1680         or selfA._toStore("CurrentOptimum") \
1681         or selfA._toStore("APosterioriCovariance"):
1682         BI = B.getI()
1683     RI = R.getI()
1684     #
1685     __n = Xb.size
1686     __m = selfA._parameters["NumberOfMembers"]
1687     #
1688     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1689         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
1690         else:                         Pn = B
1691         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
1692         selfA.StoredVariables["Analysis"].store( Xb )
1693         if selfA._toStore("APosterioriCovariance"):
1694             if hasattr(B,"asfullmatrix"):
1695                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1696             else:
1697                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1698         selfA._setInternalState("seed", numpy.random.get_state())
1699     elif selfA._parameters["nextStep"]:
1700         Xn = selfA._getInternalState("Xn")
1701     #
1702     previousJMinimum = numpy.finfo(float).max
1703     #
1704     for step in range(duration-1):
1705         numpy.random.set_state(selfA._getInternalState("seed"))
1706         if hasattr(Y,"store"):
1707             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1708         else:
1709             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1710         #
1711         if U is not None:
1712             if hasattr(U,"store") and len(U)>1:
1713                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1714             elif hasattr(U,"store") and len(U)==1:
1715                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1716             else:
1717                 Un = numpy.asmatrix(numpy.ravel( U )).T
1718         else:
1719             Un = None
1720         #
1721         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1722             Xn = CovarianceInflation( Xn,
1723                 selfA._parameters["InflationType"],
1724                 selfA._parameters["InflationFactor"],
1725                 )
1726         #
1727         #--------------------------
1728         if VariantM == "IEnKF12":
1729             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
1730             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
1731             __j = 0
1732             Deltaw = 1
1733             if not BnotT:
1734                 Ta  = numpy.identity(__m)
1735             vw  = numpy.zeros(__m)
1736             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
1737                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
1738                 #
1739                 if BnotT:
1740                     E1 = vx1 + _epsilon * EaX
1741                 else:
1742                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
1743                 #
1744                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
1745                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
1746                         argsAsSerie = True,
1747                         returnSerieAsArrayMatrix = True )
1748                 elif selfA._parameters["EstimationOf"] == "Parameters":
1749                     # --- > By principle, M = Id
1750                     E2 = Xn
1751                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1752                 vy1 = H((vx2, Un)).reshape((__p,1))
1753                 #
1754                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
1755                     argsAsSerie = True,
1756                     returnSerieAsArrayMatrix = True )
1757                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1758                 #
1759                 if BnotT:
1760                     EaY = (HE2 - vy2) / _epsilon
1761                 else:
1762                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
1763                 #
1764                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
1765                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
1766                 Deltaw = - numpy.linalg.solve(mH,GradJ)
1767                 #
1768                 vw = vw + Deltaw
1769                 #
1770                 if not BnotT:
1771                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1772                 #
1773                 __j = __j + 1
1774             #
1775             A2 = EnsembleOfAnomalies( E2 )
1776             #
1777             if BnotT:
1778                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1779                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
1780             #
1781             Xn = vx2 + A2
1782         #--------------------------
1783         else:
1784             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1785         #
1786         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1787             Xn = CovarianceInflation( Xn,
1788                 selfA._parameters["InflationType"],
1789                 selfA._parameters["InflationFactor"],
1790                 )
1791         #
1792         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1793         #--------------------------
1794         selfA._setInternalState("Xn", Xn)
1795         selfA._setInternalState("seed", numpy.random.get_state())
1796         #--------------------------
1797         #
1798         if selfA._parameters["StoreInternalVariables"] \
1799             or selfA._toStore("CostFunctionJ") \
1800             or selfA._toStore("CostFunctionJb") \
1801             or selfA._toStore("CostFunctionJo") \
1802             or selfA._toStore("APosterioriCovariance") \
1803             or selfA._toStore("InnovationAtCurrentAnalysis") \
1804             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1805             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1806             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1807             _Innovation = Ynpu - _HXa
1808         #
1809         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1810         # ---> with analysis
1811         selfA.StoredVariables["Analysis"].store( Xa )
1812         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1813             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1814         if selfA._toStore("InnovationAtCurrentAnalysis"):
1815             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1816         # ---> with current state
1817         if selfA._parameters["StoreInternalVariables"] \
1818             or selfA._toStore("CurrentState"):
1819             selfA.StoredVariables["CurrentState"].store( Xn )
1820         if selfA._toStore("ForecastState"):
1821             selfA.StoredVariables["ForecastState"].store( E2 )
1822         if selfA._toStore("ForecastCovariance"):
1823             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
1824         if selfA._toStore("BMA"):
1825             selfA.StoredVariables["BMA"].store( E2 - Xa )
1826         if selfA._toStore("InnovationAtCurrentState"):
1827             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
1828         if selfA._toStore("SimulatedObservationAtCurrentState") \
1829             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1830             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
1831         # ---> others
1832         if selfA._parameters["StoreInternalVariables"] \
1833             or selfA._toStore("CostFunctionJ") \
1834             or selfA._toStore("CostFunctionJb") \
1835             or selfA._toStore("CostFunctionJo") \
1836             or selfA._toStore("CurrentOptimum") \
1837             or selfA._toStore("APosterioriCovariance"):
1838             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1839             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1840             J   = Jb + Jo
1841             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1842             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1843             selfA.StoredVariables["CostFunctionJ" ].store( J )
1844             #
1845             if selfA._toStore("IndexOfOptimum") \
1846                 or selfA._toStore("CurrentOptimum") \
1847                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1848                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1849                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1850                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1851                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1852             if selfA._toStore("IndexOfOptimum"):
1853                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1854             if selfA._toStore("CurrentOptimum"):
1855                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1856             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1857                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1858             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1859                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1860             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1861                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1862             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1863                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1864         if selfA._toStore("APosterioriCovariance"):
1865             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1866         if selfA._parameters["EstimationOf"] == "Parameters" \
1867             and J < previousJMinimum:
1868             previousJMinimum    = J
1869             XaMin               = Xa
1870             if selfA._toStore("APosterioriCovariance"):
1871                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1872         # ---> For the smoothers
1873         if selfA._toStore("CurrentEnsembleState"):
1874             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1875     #
1876     # Additional final storage of the optimum in parameter estimation
1877     # ----------------------------------------------------------------------
1878     if selfA._parameters["EstimationOf"] == "Parameters":
1879         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1880         selfA.StoredVariables["Analysis"].store( XaMin )
1881         if selfA._toStore("APosterioriCovariance"):
1882             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1883         if selfA._toStore("BMA"):
1884             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1885     #
1886     return 0
1887
1888 # ==============================================================================
1889 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1890     """
1891     Incremental 3DVAR
1892     """
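    # Reading note: incremental formulation. The innovation d = Y - H(Xb) is computed once
    # with the non-linear operator; then an outer loop re-linearizes H around the current
    # iterate Xr, and an inner minimization solves for the increment dx of the quadratic cost
    #   J(dx) = 0.5*dx.T B^(-1) dx + 0.5*(d - Ht dx).T R^(-1) (d - Ht dx)
    # The outer loop stops when the decrease of J falls below CostDecrementTolerance or when
    # MaximumNumberOfSteps outer iterations have been done.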
1893     #
1894     # Initializations
1895     # ---------------
1896     #
1897     # Non-linear operator for the outer loop
1898     Hm = HO["Direct"].appliedTo
1899     #
1900     # Precompute the inverses of B and R
1901     BI = B.getI()
1902     RI = R.getI()
1903     #
1904     # Starting point of the optimization
1905     Xini = selfA._parameters["InitializationPoint"]
1906     #
1907     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
1908     Innovation = Y - HXb
1909     #
1910     # Outer Loop
1911     # ----------
1912     iOuter = 0
1913     J      = 1./mpr
1914     DeltaJ = 1./mpr
1915     Xr     = Xini.reshape((-1,1))
1916     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
1917         #
1918         # Inner Loop
1919         # ----------
1920         Ht = HO["Tangent"].asMatrix(Xr)
1921         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
1922         #
1923         # Definition of the cost function
1924         # ------------------------------
1925         def CostFunction(dx):
1926             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
1927             if selfA._parameters["StoreInternalVariables"] or \
1928                 selfA._toStore("CurrentState") or \
1929                 selfA._toStore("CurrentOptimum"):
1930                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
1931             _HdX = Ht * _dX
1932             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
1933             _dInnovation = Innovation - _HdX
1934             if selfA._toStore("SimulatedObservationAtCurrentState") or \
1935                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1936                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
1937             if selfA._toStore("InnovationAtCurrentState"):
1938                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
1939             #
1940             Jb  = float( 0.5 * _dX.T * BI * _dX )
1941             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
1942             J   = Jb + Jo
1943             #
1944             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
1945             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1946             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1947             selfA.StoredVariables["CostFunctionJ" ].store( J )
1948             if selfA._toStore("IndexOfOptimum") or \
1949                 selfA._toStore("CurrentOptimum") or \
1950                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
1951                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
1952                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
1953                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1954                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1955             if selfA._toStore("IndexOfOptimum"):
1956                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1957             if selfA._toStore("CurrentOptimum"):
1958                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
1959             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1960                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
1961             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1962                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1963             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1964                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1965             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1966                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1967             return J
1968         #
1969         def GradientOfCostFunction(dx):
1970             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
1971             _HdX         = Ht * _dX
1972             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
1973             _dInnovation = Innovation - _HdX
1974             GradJb       = BI * _dX
1975             GradJo       = - Ht.T @ (RI * _dInnovation)
1976             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
1977             return GradJ
1978         #
1979         # Minimization of the cost functional
1980         # --------------------------------
1981         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
1982         #
1983         if selfA._parameters["Minimizer"] == "LBFGSB":
1984             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
1985             if "0.19" <= scipy.version.version <= "1.1.0":
1986                 import lbfgsbhlt as optimiseur
1987             else:
1988                 import scipy.optimize as optimiseur
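            # Note: for SciPy versions in the [0.19, 1.1.0] range, a locally available module
            # 'lbfgsbhlt' replaces scipy.optimize; both are assumed to expose the same
            # fmin_l_bfgs_b interface, so the call below is unchanged in either case.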
1989             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
1990                 func        = CostFunction,
1991                 x0          = numpy.zeros(Xini.size),
1992                 fprime      = GradientOfCostFunction,
1993                 args        = (),
1994                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
1995                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
1996                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
1997                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1998                 iprint      = selfA._parameters["optiprint"],
1999                 )
2000             nfeval = Informations['funcalls']
2001             rc     = Informations['warnflag']
2002         elif selfA._parameters["Minimizer"] == "TNC":
2003             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2004                 func        = CostFunction,
2005                 x0          = numpy.zeros(Xini.size),
2006                 fprime      = GradientOfCostFunction,
2007                 args        = (),
2008                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2009                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2010                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2011                 ftol        = selfA._parameters["CostDecrementTolerance"],
2012                 messages    = selfA._parameters["optmessages"],
2013                 )
2014         elif selfA._parameters["Minimizer"] == "CG":
2015             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2016                 f           = CostFunction,
2017                 x0          = numpy.zeros(Xini.size),
2018                 fprime      = GradientOfCostFunction,
2019                 args        = (),
2020                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2021                 gtol        = selfA._parameters["GradientNormTolerance"],
2022                 disp        = selfA._parameters["optdisp"],
2023                 full_output = True,
2024                 )
2025         elif selfA._parameters["Minimizer"] == "NCG":
2026             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2027                 f           = CostFunction,
2028                 x0          = numpy.zeros(Xini.size),
2029                 fprime      = GradientOfCostFunction,
2030                 args        = (),
2031                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2032                 avextol     = selfA._parameters["CostDecrementTolerance"],
2033                 disp        = selfA._parameters["optdisp"],
2034                 full_output = True,
2035                 )
2036         elif selfA._parameters["Minimizer"] == "BFGS":
2037             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2038                 f           = CostFunction,
2039                 x0          = numpy.zeros(Xini.size),
2040                 fprime      = GradientOfCostFunction,
2041                 args        = (),
2042                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2043                 gtol        = selfA._parameters["GradientNormTolerance"],
2044                 disp        = selfA._parameters["optdisp"],
2045                 full_output = True,
2046                 )
2047         else:
2048             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2049         #
2050         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2051         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2052         #
2053         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2054             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2055             Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2056         else:
2057             Minimum = Xb + numpy.asmatrix(numpy.ravel( Minimum )).T
2058         #
2059         Xr     = Minimum
2060         DeltaJ = selfA.StoredVariables["CostFunctionJ" ][-1] - J
2061         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
2062     #
2063     # Obtaining the analysis
2064     # ----------------------
2065     Xa = Xr
2066     #
2067     selfA.StoredVariables["Analysis"].store( Xa )
2068     #
2069     if selfA._toStore("OMA") or \
2070         selfA._toStore("SigmaObs2") or \
2071         selfA._toStore("SimulationQuantiles") or \
2072         selfA._toStore("SimulatedObservationAtOptimum"):
2073         if selfA._toStore("SimulatedObservationAtCurrentState"):
2074             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2075         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2076             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2077         else:
2078             HXa = Hm( Xa )
2079     #
2080     # Computation of the analysis covariance
2081     # ---------------------------------
2082     if selfA._toStore("APosterioriCovariance") or \
2083         selfA._toStore("SimulationQuantiles") or \
2084         selfA._toStore("JacobianMatrixAtOptimum") or \
2085         selfA._toStore("KalmanGainAtOptimum"):
2086         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2087         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2088     if selfA._toStore("APosterioriCovariance") or \
2089         selfA._toStore("SimulationQuantiles") or \
2090         selfA._toStore("KalmanGainAtOptimum"):
2091         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2092         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2093     if selfA._toStore("APosterioriCovariance") or \
2094         selfA._toStore("SimulationQuantiles"):
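        # Reading note: the a posteriori covariance A is computed as the inverse of the
        # Hessian of the 3DVAR cost, B^(-1) + H.T R^(-1) H, assembled column by column by
        # applying the tangent (HtM) and adjoint (HaM) operators to each basis vector _ee.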
2095         HessienneI = []
2096         nb = Xa.size
2097         for i in range(nb):
2098             _ee    = numpy.matrix(numpy.zeros(nb)).T
2099             _ee[i] = 1.
2100             _HtEE  = numpy.dot(HtM,_ee)
2101             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2102             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2103         HessienneI = numpy.matrix( HessienneI )
2104         A = HessienneI.I
2105         if min(A.shape) != max(A.shape):
2106             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2107         if (numpy.diag(A) < 0).any():
2108             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2109         if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
2110             try:
2111                 L = numpy.linalg.cholesky( A )
2112             except:
2113                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2114     if selfA._toStore("APosterioriCovariance"):
2115         selfA.StoredVariables["APosterioriCovariance"].store( A )
2116     if selfA._toStore("JacobianMatrixAtOptimum"):
2117         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2118     if selfA._toStore("KalmanGainAtOptimum"):
2119         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2120         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2121         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2122     #
2123     # Additional calculations and/or storage
2124     # ---------------------------------------
2125     if selfA._toStore("Innovation") or \
2126         selfA._toStore("SigmaObs2") or \
2127         selfA._toStore("MahalanobisConsistency") or \
2128         selfA._toStore("OMB"):
2129         d  = Y - HXb
2130     if selfA._toStore("Innovation"):
2131         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2132     if selfA._toStore("BMA"):
2133         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2134     if selfA._toStore("OMA"):
2135         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2136     if selfA._toStore("OMB"):
2137         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2138     if selfA._toStore("SigmaObs2"):
2139         TraceR = R.trace(Y.size)
2140         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2141     if selfA._toStore("MahalanobisConsistency"):
2142         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2143     if selfA._toStore("SimulationQuantiles"):
2144         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2145     if selfA._toStore("SimulatedObservationAtBackground"):
2146         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2147     if selfA._toStore("SimulatedObservationAtOptimum"):
2148         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2149     #
2150     return 0
2151
2152 # ==============================================================================
2153 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
2154     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
2155     """
2156     Maximum Likelihood Ensemble Filter
2157     """
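    # Informal sketch of the analysis step coded below (MLEF13 variant), given
    # as a reading aid only: with Xfm the forecast ensemble mean and EaX the
    # scaled anomalies, the analysis is sought in ensemble space as
    # x(w) = Xfm + EaX w, by Newton-like iterations on
    #   grad J(w) = w - EaY^T R^{-1} (y - H(x(w))),
    #   Hess J(w) ~ I + EaY^T R^{-1} EaY,
    # where EaY are the observed anomalies; Ta = (Hess J)^{-1/2} is the
    # square-root transform used to rebuild the analysis ensemble.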
2158     if selfA._parameters["EstimationOf"] == "Parameters":
2159         selfA._parameters["StoreInternalVariables"] = True
2160     #
2161     # Operators
2162     H = HO["Direct"].appliedControledFormTo
2163     #
2164     if selfA._parameters["EstimationOf"] == "State":
2165         M = EM["Direct"].appliedControledFormTo
2166     #
2167     if CM is not None and "Tangent" in CM and U is not None:
2168         Cm = CM["Tangent"].asMatrix(Xb)
2169     else:
2170         Cm = None
2171     #
2172     # Observation duration and sizes
2173     if hasattr(Y,"stepnumber"):
2174         duration = Y.stepnumber()
2175         __p = numpy.cumprod(Y.shape())[-1]
2176     else:
2177         duration = 2
2178         __p = numpy.array(Y).size
2179     #
2180     # Precomputation of the inverses of B and R
2181     if selfA._parameters["StoreInternalVariables"] \
2182         or selfA._toStore("CostFunctionJ") \
2183         or selfA._toStore("CostFunctionJb") \
2184         or selfA._toStore("CostFunctionJo") \
2185         or selfA._toStore("CurrentOptimum") \
2186         or selfA._toStore("APosterioriCovariance"):
2187         BI = B.getI()
2188     RI = R.getI()
2189     #
2190     __n = Xb.size
2191     __m = selfA._parameters["NumberOfMembers"]
2192     #
2193     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2194         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2195         selfA.StoredVariables["Analysis"].store( Xb )
2196         if selfA._toStore("APosterioriCovariance"):
2197             if hasattr(B,"asfullmatrix"):
2198                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2199             else:
2200                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2201         selfA._setInternalState("seed", numpy.random.get_state())
2202     elif selfA._parameters["nextStep"]:
2203         Xn = selfA._getInternalState("Xn")
2204     #
2205     previousJMinimum = numpy.finfo(float).max
2206     #
2207     for step in range(duration-1):
2208         numpy.random.set_state(selfA._getInternalState("seed"))
2209         if hasattr(Y,"store"):
2210             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2211         else:
2212             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2213         #
2214         if U is not None:
2215             if hasattr(U,"store") and len(U)>1:
2216                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2217             elif hasattr(U,"store") and len(U)==1:
2218                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2219             else:
2220                 Un = numpy.asmatrix(numpy.ravel( U )).T
2221         else:
2222             Un = None
2223         #
2224         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2225             Xn = CovarianceInflation( Xn,
2226                 selfA._parameters["InflationType"],
2227                 selfA._parameters["InflationFactor"],
2228                 )
2229         #
2230         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2231             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2232                 argsAsSerie = True,
2233                 returnSerieAsArrayMatrix = True )
2234             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2235             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2236                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2237                 Xn_predicted = Xn_predicted + Cm * Un
2238         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2239             # --- > By principle, M = Id, Q = 0
2240             Xn_predicted = EMX = Xn
2241         #
2242         #--------------------------
2243         if VariantM == "MLEF13":
2244             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2245             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2246             Ua  = numpy.identity(__m)
2247             __j = 0
2248             Deltaw = 1
2249             if not BnotT:
2250                 Ta  = numpy.identity(__m)
2251             vw  = numpy.zeros(__m)
2252             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2253                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2254                 #
2255                 if BnotT:
2256                     E1 = vx1 + _epsilon * EaX
2257                 else:
2258                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2259                 #
2260                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2261                     argsAsSerie = True,
2262                     returnSerieAsArrayMatrix = True )
2263                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2264                 #
2265                 if BnotT:
2266                     EaY = (HE2 - vy2) / _epsilon
2267                 else:
2268                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2269                 #
2270                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2271                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2272                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2273                 #
2274                 vw = vw + Deltaw
2275                 #
2276                 if not BnotT:
2277                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2278                 #
2279                 __j = __j + 1
2280             #
2281             if BnotT:
2282                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2283             #
2284             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2285         #--------------------------
2286         else:
2287             raise ValueError("VariantM has to be chosen from the authorized methods list.")
2288         #
2289         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2290             Xn = CovarianceInflation( Xn,
2291                 selfA._parameters["InflationType"],
2292                 selfA._parameters["InflationFactor"],
2293                 )
2294         #
2295         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2296         #--------------------------
2297         selfA._setInternalState("Xn", Xn)
2298         selfA._setInternalState("seed", numpy.random.get_state())
2299         #--------------------------
2300         #
2301         if selfA._parameters["StoreInternalVariables"] \
2302             or selfA._toStore("CostFunctionJ") \
2303             or selfA._toStore("CostFunctionJb") \
2304             or selfA._toStore("CostFunctionJo") \
2305             or selfA._toStore("APosterioriCovariance") \
2306             or selfA._toStore("InnovationAtCurrentAnalysis") \
2307             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2308             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2309             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2310             _Innovation = Ynpu - _HXa
2311         #
2312         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2313         # ---> with the analysis
2314         selfA.StoredVariables["Analysis"].store( Xa )
2315         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2316             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2317         if selfA._toStore("InnovationAtCurrentAnalysis"):
2318             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2319         # ---> with the current state
2320         if selfA._parameters["StoreInternalVariables"] \
2321             or selfA._toStore("CurrentState"):
2322             selfA.StoredVariables["CurrentState"].store( Xn )
2323         if selfA._toStore("ForecastState"):
2324             selfA.StoredVariables["ForecastState"].store( EMX )
2325         if selfA._toStore("ForecastCovariance"):
2326             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2327         if selfA._toStore("BMA"):
2328             selfA.StoredVariables["BMA"].store( EMX - Xa )
2329         if selfA._toStore("InnovationAtCurrentState"):
2330             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2331         if selfA._toStore("SimulatedObservationAtCurrentState") \
2332             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2333             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2334         # ---> others
2335         if selfA._parameters["StoreInternalVariables"] \
2336             or selfA._toStore("CostFunctionJ") \
2337             or selfA._toStore("CostFunctionJb") \
2338             or selfA._toStore("CostFunctionJo") \
2339             or selfA._toStore("CurrentOptimum") \
2340             or selfA._toStore("APosterioriCovariance"):
2341             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2342             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2343             J   = Jb + Jo
2344             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2345             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2346             selfA.StoredVariables["CostFunctionJ" ].store( J )
2347             #
2348             if selfA._toStore("IndexOfOptimum") \
2349                 or selfA._toStore("CurrentOptimum") \
2350                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2351                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2352                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2353                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2354                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2355             if selfA._toStore("IndexOfOptimum"):
2356                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2357             if selfA._toStore("CurrentOptimum"):
2358                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2359             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2360                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2361             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2362                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2363             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2364                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2365             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2366                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2367         if selfA._toStore("APosterioriCovariance"):
2368             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2369         if selfA._parameters["EstimationOf"] == "Parameters" \
2370             and J < previousJMinimum:
2371             previousJMinimum    = J
2372             XaMin               = Xa
2373             if selfA._toStore("APosterioriCovariance"):
2374                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2375         # ---> For the smoothers
2376         if selfA._toStore("CurrentEnsembleState"):
2377             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2378     #
2379     # Additional final storage of the optimum for parameter estimation
2380     # -----------------------------------------------------------------
2381     if selfA._parameters["EstimationOf"] == "Parameters":
2382         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2383         selfA.StoredVariables["Analysis"].store( XaMin )
2384         if selfA._toStore("APosterioriCovariance"):
2385             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2386         if selfA._toStore("BMA"):
2387             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2388     #
2389     return 0
2390
2391 # ==============================================================================
2392 def mmqr(
2393         func     = None,
2394         x0       = None,
2395         fprime   = None,
2396         bounds   = None,
2397         quantile = 0.5,
2398         maxfun   = 15000,
2399         toler    = 1.e-06,
2400         y        = None,
2401         ):
2402     """
2403     Computer implementation of the MMQR algorithm, based on the publication:
2404     David R. Hunter, Kenneth Lange, "Quantile Regression via an MM Algorithm",
2405     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2406     """
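    # Informal sketch of the MM iteration coded below, as a reading aid only:
    # the residuals r = y - func(x) receive weights 1/(epsilon + |r_i|), a
    # quadratic surrogate of the quantile loss is decreased by a weighted
    # least-squares step on the linearized model (normal equations M step = SM),
    # the step being halved while it leaves the bounds or increases the
    # surrogate, until the surrogate decrease falls below the tolerance.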
2407     #
2408     # Retrieval of the initial data and information
2409     # ----------------------------------------------
2410     variables = numpy.ravel( x0 )
2411     mesures   = numpy.ravel( y )
2412     increment = sys.float_info[0]
2413     p         = variables.size
2414     n         = mesures.size
2415     quantile  = float(quantile)
2416     #
2417     # Computation of the MM parameters
2418     # --------------------------------
2419     tn      = float(toler) / n
2420     e0      = -tn / math.log(tn)
2421     epsilon = (e0-tn)/(1+math.log(e0))
2422     #
2423     # Initialization computations
2424     # ---------------------------
2425     residus  = mesures - numpy.ravel( func( variables ) )
2426     poids    = 1./(epsilon+numpy.abs(residus))
2427     veps     = 1. - 2. * quantile - residus * poids
2428     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2429     iteration = 0
2430     #
2431     # Iterative search
2432     # ----------------
2433     while (increment > toler) and (iteration < maxfun) :
2434         iteration += 1
2435         #
2436         Derivees  = numpy.array(fprime(variables))
2437         Derivees  = Derivees.reshape(n,p) # Necessary to restore the matrix shape if it goes through YACS pipes
2438         DeriveesT = Derivees.transpose()
2439         M         =   numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
2440         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
2441         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2442         #
2443         variables = variables + step
2444         if bounds is not None:
2445             # Warning: avoid an infinite loop if an interval is too small
2446             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2447                 step      = step/2.
2448                 variables = variables - step
2449         residus   = mesures - numpy.ravel( func(variables) )
2450         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2451         #
2452         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2453             step      = step/2.
2454             variables = variables - step
2455             residus   = mesures - numpy.ravel( func(variables) )
2456             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2457         #
2458         increment     = lastsurrogate-surrogate
2459         poids         = 1./(epsilon+numpy.abs(residus))
2460         veps          = 1. - 2. * quantile - residus * poids
2461         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2462     #
2463     # Discrepancy measure
2464     # -------------------
2465     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
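    # Reading aid: Ecart is the total check (pinball) loss of the final
    # residuals, sum_i rho_q(r_i) with rho_q(r) = r*(q - 1_{r<0}), which
    # algebraically equals q*sum(r) - sum(r[r<0]) as computed above.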
2466     #
2467     return variables, Ecart, [n,p,iteration,increment,0]
2468
2469 # ==============================================================================
2470 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2471     """
2472     Multi-step and multi-method 3DVAR
2473     """
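    # Reading aid (sketch): for each assimilation window, the previous analysis
    # is propagated by the evolution model M in "State" estimation (or kept as
    # is in "Parameters" estimation), then the mono-step cycle given by
    # "oneCycle" (for instance std3dvar or psas3dvar of this module) is run on
    # the predicted state and the new observation; the resulting analysis
    # becomes the background of the next window.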
2474     #
2475     # Initialisation
2476     if selfA._parameters["EstimationOf"] == "State":
2477         M = EM["Direct"].appliedTo
2478         #
2479         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2480             Xn = numpy.ravel(Xb).reshape((-1,1))
2481             selfA.StoredVariables["Analysis"].store( Xn )
2482             if selfA._toStore("APosterioriCovariance"):
2483                 if hasattr(B,"asfullmatrix"):
2484                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2485                 else:
2486                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2487             if selfA._toStore("ForecastState"):
2488                 selfA.StoredVariables["ForecastState"].store( Xn )
2489         elif selfA._parameters["nextStep"]:
2490             Xn = selfA._getInternalState("Xn")
2491     else:
2492         Xn = numpy.ravel(Xb).reshape((-1,1))
2493     #
2494     if hasattr(Y,"stepnumber"):
2495         duration = Y.stepnumber()
2496     else:
2497         duration = 2
2498     #
2499     # Multi-step
2500     for step in range(duration-1):
2501         if hasattr(Y,"store"):
2502             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2503         else:
2504             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2505         #
2506         if selfA._parameters["EstimationOf"] == "State": # Forecast
2507             Xn_predicted = M( Xn )
2508             if selfA._toStore("ForecastState"):
2509                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2510         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2511             # --- > By principle, M = Id, Q = 0
2512             Xn_predicted = Xn
2513         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2514         #
2515         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2516         #
2517         Xn = selfA.StoredVariables["Analysis"][-1]
2518         #--------------------------
2519         selfA._setInternalState("Xn", Xn)
2520     #
2521     return 0
2522
2523 # ==============================================================================
2524 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2525     """
2526     3DVAR PSAS
2527     """
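    # Reading aid (sketch): PSAS carries out the minimization in the dual,
    # observation space. With d = Y - H(Xb) and a control variable w of the
    # size of Y, the cost function coded below is
    #   J(w) = 1/2 w^T (H B H^T + R) w - w^T d,
    # and the analysis is recovered in state space as Xa = Xb + B H^T w.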
2528     #
2529     # Initialisations
2530     # ---------------
2531     #
2532     # Operators
2533     Hm = HO["Direct"].appliedTo
2534     #
2535     # Possible use of a precomputed H(Xb) vector
2536     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2537         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2538     else:
2539         HXb = Hm( Xb )
2540     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2541     if Y.size != HXb.size:
2542         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2543     if max(Y.shape) != max(HXb.shape):
2544         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2545     #
2546     if selfA._toStore("JacobianMatrixAtBackground"):
2547         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2548         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2549         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2550     #
2551     Ht = HO["Tangent"].asMatrix(Xb)
2552     BHT = B * Ht.T
2553     HBHTpR = R + Ht * BHT
2554     Innovation = Y - HXb
2555     #
2556     # Starting point of the optimization
2557     Xini = numpy.zeros(Xb.shape)
2558     #
2559     # Definition of the cost function
2560     # -------------------------------
2561     def CostFunction(w):
2562         _W = numpy.asmatrix(numpy.ravel( w )).T
2563         if selfA._parameters["StoreInternalVariables"] or \
2564             selfA._toStore("CurrentState") or \
2565             selfA._toStore("CurrentOptimum"):
2566             selfA.StoredVariables["CurrentState"].store( Xb + BHT * _W )
2567         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2568             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2569             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT * _W ) )
2570         if selfA._toStore("InnovationAtCurrentState"):
2571             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2572         #
2573         Jb  = float( 0.5 * _W.T * HBHTpR * _W )
2574         Jo  = float( - _W.T * Innovation )
2575         J   = Jb + Jo
2576         #
2577         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2578         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2579         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2580         selfA.StoredVariables["CostFunctionJ" ].store( J )
2581         if selfA._toStore("IndexOfOptimum") or \
2582             selfA._toStore("CurrentOptimum") or \
2583             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2584             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2585             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2586             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2587             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2588         if selfA._toStore("IndexOfOptimum"):
2589             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2590         if selfA._toStore("CurrentOptimum"):
2591             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2592         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2593             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2594         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2595             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2596         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2597             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2598         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2599             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2600         return J
2601     #
2602     def GradientOfCostFunction(w):
2603         _W = numpy.asmatrix(numpy.ravel( w )).T
2604         GradJb  = HBHTpR * _W
2605         GradJo  = - Innovation
2606         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2607         return GradJ
2608     #
2609     # Minimization of the functional
2610     # ------------------------------
2611     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
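    # nbPreviousSteps marks where the cost values of the current run begin, so
    # that the argmin searches below only look at this minimization.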
2612     #
2613     if selfA._parameters["Minimizer"] == "LBFGSB":
2614         if "0.19" <= scipy.version.version <= "1.1.0":
2615             import lbfgsbhlt as optimiseur
2616         else:
2617             import scipy.optimize as optimiseur
2618         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2619             func        = CostFunction,
2620             x0          = Xini,
2621             fprime      = GradientOfCostFunction,
2622             args        = (),
2623             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2624             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2625             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2626             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2627             iprint      = selfA._parameters["optiprint"],
2628             )
2629         nfeval = Informations['funcalls']
2630         rc     = Informations['warnflag']
2631     elif selfA._parameters["Minimizer"] == "TNC":
2632         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2633             func        = CostFunction,
2634             x0          = Xini,
2635             fprime      = GradientOfCostFunction,
2636             args        = (),
2637             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2638             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2639             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2640             ftol        = selfA._parameters["CostDecrementTolerance"],
2641             messages    = selfA._parameters["optmessages"],
2642             )
2643     elif selfA._parameters["Minimizer"] == "CG":
2644         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2645             f           = CostFunction,
2646             x0          = Xini,
2647             fprime      = GradientOfCostFunction,
2648             args        = (),
2649             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2650             gtol        = selfA._parameters["GradientNormTolerance"],
2651             disp        = selfA._parameters["optdisp"],
2652             full_output = True,
2653             )
2654     elif selfA._parameters["Minimizer"] == "NCG":
2655         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2656             f           = CostFunction,
2657             x0          = Xini,
2658             fprime      = GradientOfCostFunction,
2659             args        = (),
2660             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2661             avextol     = selfA._parameters["CostDecrementTolerance"],
2662             disp        = selfA._parameters["optdisp"],
2663             full_output = True,
2664             )
2665     elif selfA._parameters["Minimizer"] == "BFGS":
2666         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2667             f           = CostFunction,
2668             x0          = Xini,
2669             fprime      = GradientOfCostFunction,
2670             args        = (),
2671             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2672             gtol        = selfA._parameters["GradientNormTolerance"],
2673             disp        = selfA._parameters["optdisp"],
2674             full_output = True,
2675             )
2676     else:
2677         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2678     #
2679     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2680     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2681     #
2682     # Correction to work around a TNC bug on the returned Minimum
2683     # ------------------------------------------------------------
2684     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2685         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2686         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2687     else:
2688         Minimum = Xb + BHT * numpy.asmatrix(numpy.ravel( Minimum )).T
2689     #
2690     # Obtaining the analysis
2691     # ----------------------
2692     Xa = Minimum
2693     #
2694     selfA.StoredVariables["Analysis"].store( Xa )
2695     #
2696     if selfA._toStore("OMA") or \
2697         selfA._toStore("SigmaObs2") or \
2698         selfA._toStore("SimulationQuantiles") or \
2699         selfA._toStore("SimulatedObservationAtOptimum"):
2700         if selfA._toStore("SimulatedObservationAtCurrentState"):
2701             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2702         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2703             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2704         else:
2705             HXa = Hm( Xa )
2706     #
2707     # Computation of the analysis covariance
2708     # ---------------------------------------
2709     if selfA._toStore("APosterioriCovariance") or \
2710         selfA._toStore("SimulationQuantiles") or \
2711         selfA._toStore("JacobianMatrixAtOptimum") or \
2712         selfA._toStore("KalmanGainAtOptimum"):
2713         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2714         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2715     if selfA._toStore("APosterioriCovariance") or \
2716         selfA._toStore("SimulationQuantiles") or \
2717         selfA._toStore("KalmanGainAtOptimum"):
2718         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2719         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2720     if selfA._toStore("APosterioriCovariance") or \
2721         selfA._toStore("SimulationQuantiles"):
2722         BI = B.getI()
2723         RI = R.getI()
2724         HessienneI = []
2725         nb = Xa.size
2726         for i in range(nb):
2727             _ee    = numpy.matrix(numpy.zeros(nb)).T
2728             _ee[i] = 1.
2729             _HtEE  = numpy.dot(HtM,_ee)
2730             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2731             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2732         HessienneI = numpy.matrix( HessienneI )
2733         A = HessienneI.I
2734         if min(A.shape) != max(A.shape):
2735             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2736         if (numpy.diag(A) < 0).any():
2737             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2738         if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
2739             try:
2740                 L = numpy.linalg.cholesky( A )
2741             except numpy.linalg.LinAlgError:
2742                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2743     if selfA._toStore("APosterioriCovariance"):
2744         selfA.StoredVariables["APosterioriCovariance"].store( A )
2745     if selfA._toStore("JacobianMatrixAtOptimum"):
2746         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2747     if selfA._toStore("KalmanGainAtOptimum"):
2748         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2749         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2750         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2751     #
2752     # Additional calculations and/or storage
2753     # ---------------------------------------
2754     if selfA._toStore("Innovation") or \
2755         selfA._toStore("SigmaObs2") or \
2756         selfA._toStore("MahalanobisConsistency") or \
2757         selfA._toStore("OMB"):
2758         d  = Y - HXb
2759     if selfA._toStore("Innovation"):
2760         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2761     if selfA._toStore("BMA"):
2762         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2763     if selfA._toStore("OMA"):
2764         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2765     if selfA._toStore("OMB"):
2766         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2767     if selfA._toStore("SigmaObs2"):
2768         TraceR = R.trace(Y.size)
2769         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2770     if selfA._toStore("MahalanobisConsistency"):
2771         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2772     if selfA._toStore("SimulationQuantiles"):
2773         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2774     if selfA._toStore("SimulatedObservationAtBackground"):
2775         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2776     if selfA._toStore("SimulatedObservationAtOptimum"):
2777         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2778     #
2779     return 0
2780
2781 # ==============================================================================
2782 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
2783     """
2784     Stochastic EnKF
2785     """
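    # Reading aid (sketch) for the two variants coded below:
    #  - "KalmanFilterFormula05": perturbed-observations update with an explicit
    #    gain K = Pf H^T (H Pf H^T + R)^{-1} built from the ensemble covariances,
    #    each member being corrected with its own perturbed observation y + ri;
    #  - "KalmanFilterFormula16": the gain is built directly from the state and
    #    observation anomalies (EaX, EaY), the observation perturbations being
    #    drawn as a centered ensemble EpY around the observation.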
2786     if selfA._parameters["EstimationOf"] == "Parameters":
2787         selfA._parameters["StoreInternalVariables"] = True
2788     #
2789     # Operators
2790     H = HO["Direct"].appliedControledFormTo
2791     #
2792     if selfA._parameters["EstimationOf"] == "State":
2793         M = EM["Direct"].appliedControledFormTo
2794     #
2795     if CM is not None and "Tangent" in CM and U is not None:
2796         Cm = CM["Tangent"].asMatrix(Xb)
2797     else:
2798         Cm = None
2799     #
2800     # Observation duration and sizes
2801     if hasattr(Y,"stepnumber"):
2802         duration = Y.stepnumber()
2803         __p = numpy.cumprod(Y.shape())[-1]
2804     else:
2805         duration = 2
2806         __p = numpy.array(Y).size
2807     #
2808     # Precomputation of the inverses of B and R
2809     if selfA._parameters["StoreInternalVariables"] \
2810         or selfA._toStore("CostFunctionJ") \
2811         or selfA._toStore("CostFunctionJb") \
2812         or selfA._toStore("CostFunctionJo") \
2813         or selfA._toStore("CurrentOptimum") \
2814         or selfA._toStore("APosterioriCovariance"):
2815         BI = B.getI()
2816         RI = R.getI()
2817     #
2818     __n = Xb.size
2819     __m = selfA._parameters["NumberOfMembers"]
2820     #
2821     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
2822     else:                         Rn = R
2823     #
2824     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2825         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
2826         else:                         Pn = B
2827         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
2828         selfA.StoredVariables["Analysis"].store( Xb )
2829         if selfA._toStore("APosterioriCovariance"):
2830             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
2831         selfA._setInternalState("seed", numpy.random.get_state())
2832     elif selfA._parameters["nextStep"]:
2833         Xn = selfA._getInternalState("Xn")
2834     #
2835     previousJMinimum = numpy.finfo(float).max
2836     #
2837     for step in range(duration-1):
2838         numpy.random.set_state(selfA._getInternalState("seed"))
2839         if hasattr(Y,"store"):
2840             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2841         else:
2842             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2843         #
2844         if U is not None:
2845             if hasattr(U,"store") and len(U)>1:
2846                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2847             elif hasattr(U,"store") and len(U)==1:
2848                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2849             else:
2850                 Un = numpy.asmatrix(numpy.ravel( U )).T
2851         else:
2852             Un = None
2853         #
2854         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2855             Xn = CovarianceInflation( Xn,
2856                 selfA._parameters["InflationType"],
2857                 selfA._parameters["InflationFactor"],
2858                 )
2859         #
2860         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2861             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2862                 argsAsSerie = True,
2863                 returnSerieAsArrayMatrix = True )
2864             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2865             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2866                 argsAsSerie = True,
2867                 returnSerieAsArrayMatrix = True )
2868             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2869                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2870                 Xn_predicted = Xn_predicted + Cm * Un
2871         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2872             # --- > By principle, M = Id, Q = 0
2873             Xn_predicted = EMX = Xn
2874             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2875                 argsAsSerie = True,
2876                 returnSerieAsArrayMatrix = True )
2877         #
2878         # Mean of forecast and observation of forecast
2879         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2880         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2881         #
2882         #--------------------------
2883         if VariantM == "KalmanFilterFormula05":
2884             PfHT, HPfHT = 0., 0.
2885             for i in range(__m):
2886                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
2887                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
2888                 PfHT  += Exfi * Eyfi.T
2889                 HPfHT += Eyfi * Eyfi.T
2890             PfHT  = (1./(__m-1)) * PfHT
2891             HPfHT = (1./(__m-1)) * HPfHT
2892             Kn     = PfHT * ( R + HPfHT ).I
2893             del PfHT, HPfHT
2894             #
2895             for i in range(__m):
2896                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
2897                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
2898         #--------------------------
2899         elif VariantM == "KalmanFilterFormula16":
2900             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
2901             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2902             #
2903             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
2904             EaY = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
2905             #
2906             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
2907             #
2908             for i in range(__m):
2909                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
2910         #--------------------------
2911         else:
2912             raise ValueError("VariantM has to be chosen from the authorized methods list.")
2913         #
2914         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2915             Xn = CovarianceInflation( Xn,
2916                 selfA._parameters["InflationType"],
2917                 selfA._parameters["InflationFactor"],
2918                 )
2919         #
2920         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2921         #--------------------------
2922         selfA._setInternalState("Xn", Xn)
2923         selfA._setInternalState("seed", numpy.random.get_state())
2924         #--------------------------
2925         #
2926         if selfA._parameters["StoreInternalVariables"] \
2927             or selfA._toStore("CostFunctionJ") \
2928             or selfA._toStore("CostFunctionJb") \
2929             or selfA._toStore("CostFunctionJo") \
2930             or selfA._toStore("APosterioriCovariance") \
2931             or selfA._toStore("InnovationAtCurrentAnalysis") \
2932             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2933             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2934             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2935             _Innovation = Ynpu - _HXa
2936         #
2937         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2938         # ---> with the analysis
2939         selfA.StoredVariables["Analysis"].store( Xa )
2940         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2941             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2942         if selfA._toStore("InnovationAtCurrentAnalysis"):
2943             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2944         # ---> with the current state
2945         if selfA._parameters["StoreInternalVariables"] \
2946             or selfA._toStore("CurrentState"):
2947             selfA.StoredVariables["CurrentState"].store( Xn )
2948         if selfA._toStore("ForecastState"):
2949             selfA.StoredVariables["ForecastState"].store( EMX )
2950         if selfA._toStore("ForecastCovariance"):
2951             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2952         if selfA._toStore("BMA"):
2953             selfA.StoredVariables["BMA"].store( EMX - Xa )
2954         if selfA._toStore("InnovationAtCurrentState"):
2955             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
2956         if selfA._toStore("SimulatedObservationAtCurrentState") \
2957             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2958             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
2959         # ---> others
2960         if selfA._parameters["StoreInternalVariables"] \
2961             or selfA._toStore("CostFunctionJ") \
2962             or selfA._toStore("CostFunctionJb") \
2963             or selfA._toStore("CostFunctionJo") \
2964             or selfA._toStore("CurrentOptimum") \
2965             or selfA._toStore("APosterioriCovariance"):
2966             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2967             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2968             J   = Jb + Jo
2969             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2970             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2971             selfA.StoredVariables["CostFunctionJ" ].store( J )
2972             #
2973             if selfA._toStore("IndexOfOptimum") \
2974                 or selfA._toStore("CurrentOptimum") \
2975                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2976                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2977                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2978                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2979                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2980             if selfA._toStore("IndexOfOptimum"):
2981                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2982             if selfA._toStore("CurrentOptimum"):
2983                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2984             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2985                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2986             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2987                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2988             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2989                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2990             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2991                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2992         if selfA._toStore("APosterioriCovariance"):
2993             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2994         if selfA._parameters["EstimationOf"] == "Parameters" \
2995             and J < previousJMinimum:
2996             previousJMinimum    = J
2997             XaMin               = Xa
2998             if selfA._toStore("APosterioriCovariance"):
2999                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3000         # ---> For the smoothers
3001         if selfA._toStore("CurrentEnsembleState"):
3002             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
3003     #
3004     # Additional final storage of the optimum for parameter estimation
3005     # -----------------------------------------------------------------
3006     if selfA._parameters["EstimationOf"] == "Parameters":
3007         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3008         selfA.StoredVariables["Analysis"].store( XaMin )
3009         if selfA._toStore("APosterioriCovariance"):
3010             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3011         if selfA._toStore("BMA"):
3012             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3013     #
3014     return 0
3015
3016 # ==============================================================================
3017 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3018     """
3019     3DVAR
3020     """
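    # Reading aid (sketch): this is the classical primal 3DVAR, minimizing
    #   J(x) = 1/2 (x-Xb)^T B^{-1} (x-Xb) + 1/2 (Y-H(x))^T R^{-1} (Y-H(x))
    # with gradient B^{-1} (x-Xb) - H^T R^{-1} (Y-H(x)) (the adjoint Ha is used
    # for the transposed operator), via the gradient-based optimizer selected
    # by the "Minimizer" parameter.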
3021     #
3022     # Initialisations
3023     # ---------------
3024     #
3025     # Operators
3026     Hm = HO["Direct"].appliedTo
3027     Ha = HO["Adjoint"].appliedInXTo
3028     #
3029     # Possible use of a precomputed H(Xb) vector
3030     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
3031         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
3032     else:
3033         HXb = Hm( Xb )
3034     HXb = HXb.reshape((-1,1))
3035     if Y.size != HXb.size:
3036         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
3037     if max(Y.shape) != max(HXb.shape):
3038         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
3039     #
3040     if selfA._toStore("JacobianMatrixAtBackground"):
3041         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
3042         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
3043         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
3044     #
3045     # Precomputation of the inverses of B and R
3046     BI = B.getI()
3047     RI = R.getI()
3048     #
3049     # Starting point of the optimization
3050     Xini = selfA._parameters["InitializationPoint"]
3051     #
3052     # Definition of the cost function
3053     # -------------------------------
3054     def CostFunction(x):
3055         _X  = numpy.ravel( x ).reshape((-1,1))
3056         if selfA._parameters["StoreInternalVariables"] or \
3057             selfA._toStore("CurrentState") or \
3058             selfA._toStore("CurrentOptimum"):
3059             selfA.StoredVariables["CurrentState"].store( _X )
3060         _HX = Hm( _X ).reshape((-1,1))
3061         _Innovation = Y - _HX
3062         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3063             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3064             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3065         if selfA._toStore("InnovationAtCurrentState"):
3066             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3067         #
3068         Jb  = float( 0.5 * (_X - Xb).T * (BI * (_X - Xb)) )
3069         Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
3070         J   = Jb + Jo
3071         #
3072         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3073         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3074         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3075         selfA.StoredVariables["CostFunctionJ" ].store( J )
3076         if selfA._toStore("IndexOfOptimum") or \
3077             selfA._toStore("CurrentOptimum") or \
3078             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3079             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3080             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3081             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3082             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3083         if selfA._toStore("IndexOfOptimum"):
3084             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3085         if selfA._toStore("CurrentOptimum"):
3086             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3087         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3088             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3089         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3090             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3091         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3092             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3093         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3094             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3095         return J
3096     #
3097     def GradientOfCostFunction(x):
3098         _X      = numpy.ravel( x ).reshape((-1,1))
3099         _HX     = Hm( _X )
3100         _HX     = numpy.ravel( _HX ).reshape((-1,1))
3101         GradJb  = BI * (_X - Xb)
3102         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3103         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3104         return GradJ
3105     #
3106     # Minimization of the functional
3107     # ------------------------------
3108     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3109     #
3110     if selfA._parameters["Minimizer"] == "LBFGSB":
3111         if "0.19" <= scipy.version.version <= "1.1.0":
3112             import lbfgsbhlt as optimiseur
3113         else:
3114             import scipy.optimize as optimiseur
3115         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3116             func        = CostFunction,
3117             x0          = Xini,
3118             fprime      = GradientOfCostFunction,
3119             args        = (),
3120             bounds      = selfA._parameters["Bounds"],
3121             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3122             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3123             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3124             iprint      = selfA._parameters["optiprint"],
3125             )
3126         nfeval = Informations['funcalls']
3127         rc     = Informations['warnflag']
3128     elif selfA._parameters["Minimizer"] == "TNC":
3129         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3130             func        = CostFunction,
3131             x0          = Xini,
3132             fprime      = GradientOfCostFunction,
3133             args        = (),
3134             bounds      = selfA._parameters["Bounds"],
3135             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3136             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3137             ftol        = selfA._parameters["CostDecrementTolerance"],
3138             messages    = selfA._parameters["optmessages"],
3139             )
3140     elif selfA._parameters["Minimizer"] == "CG":
3141         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3142             f           = CostFunction,
3143             x0          = Xini,
3144             fprime      = GradientOfCostFunction,
3145             args        = (),
3146             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3147             gtol        = selfA._parameters["GradientNormTolerance"],
3148             disp        = selfA._parameters["optdisp"],
3149             full_output = True,
3150             )
3151     elif selfA._parameters["Minimizer"] == "NCG":
3152         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3153             f           = CostFunction,
3154             x0          = Xini,
3155             fprime      = GradientOfCostFunction,
3156             args        = (),
3157             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3158             avextol     = selfA._parameters["CostDecrementTolerance"],
3159             disp        = selfA._parameters["optdisp"],
3160             full_output = True,
3161             )
3162     elif selfA._parameters["Minimizer"] == "BFGS":
3163         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3164             f           = CostFunction,
3165             x0          = Xini,
3166             fprime      = GradientOfCostFunction,
3167             args        = (),
3168             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3169             gtol        = selfA._parameters["GradientNormTolerance"],
3170             disp        = selfA._parameters["optdisp"],
3171             full_output = True,
3172             )
3173     else:
3174         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3175     #
3176     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3177     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3178     #
3179     # Correction to work around a TNC bug on the returned Minimum
3180     # ------------------------------------------------------------
3181     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3182         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3183     #
3184     Xa = Minimum
3185     #--------------------------
3186     #
3187     selfA.StoredVariables["Analysis"].store( Xa )
3188     #
3189     if selfA._toStore("OMA") or \
3190         selfA._toStore("SigmaObs2") or \
3191         selfA._toStore("SimulationQuantiles") or \
3192         selfA._toStore("SimulatedObservationAtOptimum"):
3193         if selfA._toStore("SimulatedObservationAtCurrentState"):
3194             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3195         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3196             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3197         else:
3198             HXa = Hm( Xa )
3199     #
3200     if selfA._toStore("APosterioriCovariance") or \
3201         selfA._toStore("SimulationQuantiles") or \
3202         selfA._toStore("JacobianMatrixAtOptimum") or \
3203         selfA._toStore("KalmanGainAtOptimum"):
3204         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3205         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3206     if selfA._toStore("APosterioriCovariance") or \
3207         selfA._toStore("SimulationQuantiles") or \
3208         selfA._toStore("KalmanGainAtOptimum"):
3209         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3210         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3211     if selfA._toStore("APosterioriCovariance") or \
3212         selfA._toStore("SimulationQuantiles"):
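        # The a posteriori covariance A is the inverse of the Hessian of the cost
        # function J at the optimum, assembled below from the action of
        #   B^{-1} + H^T R^{-1} H
        # on the canonical basis vectors, with H the tangent (HtM) and H^T the
        # adjoint (HaM) of the observation operator evaluated at the analysis Xa.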
3213         HessienneI = []
3214         nb = Xa.size
3215         for i in range(nb):
3216             _ee    = numpy.zeros(nb)
3217             _ee[i] = 1.
3218             _HtEE  = numpy.dot(HtM,_ee)
3219             HessienneI.append( numpy.ravel( BI * _ee.reshape((-1,1)) + HaM * (RI * _HtEE.reshape((-1,1))) ) )
3220         A = numpy.linalg.inv(numpy.array( HessienneI ))
3221         if min(A.shape) != max(A.shape):
3222             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
3223         if (numpy.diag(A) < 0).any():
3224             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
3225         if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
3226             try:
3227                 L = numpy.linalg.cholesky( A )
3228             except numpy.linalg.LinAlgError:
3229                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
3230     if selfA._toStore("APosterioriCovariance"):
3231         selfA.StoredVariables["APosterioriCovariance"].store( A )
3232     if selfA._toStore("JacobianMatrixAtOptimum"):
3233         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3234     if selfA._toStore("KalmanGainAtOptimum"):
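        # The gain is computed in the form best suited to the relative sizes of the
        # observation and state spaces; the two expressions are algebraically
        # equivalent:
        #   K = B H^T (R + H B H^T)^{-1}                  (observation space smaller)
        #   K = (B^{-1} + H^T R^{-1} H)^{-1} H^T R^{-1}   (state space smaller)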
3235         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3236         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3237         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3238     #
3239     if selfA._toStore("Innovation") or \
3240         selfA._toStore("SigmaObs2") or \
3241         selfA._toStore("MahalanobisConsistency") or \
3242         selfA._toStore("OMB"):
3243         d  = Y - HXb
3244     if selfA._toStore("Innovation"):
3245         selfA.StoredVariables["Innovation"].store( d )
3246     if selfA._toStore("BMA"):
3247         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3248     if selfA._toStore("OMA"):
3249         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3250     if selfA._toStore("OMB"):
3251         selfA.StoredVariables["OMB"].store( d )
3252     if selfA._toStore("SigmaObs2"):
3253         TraceR = R.trace(Y.size)
3254         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
3255     if selfA._toStore("MahalanobisConsistency"):
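        # Consistency diagnostic: 2*J(xa)/p, with p the number of observations,
        # should be close to 1 on average when the error covariances B and R are
        # correctly specified.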
3256         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3257     if selfA._toStore("SimulationQuantiles"):
3258         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3259     if selfA._toStore("SimulatedObservationAtBackground"):
3260         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3261     if selfA._toStore("SimulatedObservationAtOptimum"):
3262         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3263     #
3264     return 0
3265
3266 # ==============================================================================
3267 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3268     """
3269     4DVAR
3270     """
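    #
    # The strong-constraint 4DVAR cost function minimized below, for an initial
    # state x and the observations y_k over the assimilation window, is
    #   J(x) = 1/2 (x - xb)^T B^{-1} (x - xb)
    #        + 1/2 sum_k [y_k - H(M_{0->k}(x))]^T R^{-1} [y_k - H(M_{0->k}(x))]
    # where M_{0->k} is the model propagation from step 0 to step k (control terms
    # added through Cm/Un are omitted here for clarity). The gradient is evaluated
    # by the adjoint backward recursion implemented in GradientOfCostFunction.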
3271     #
3272     # Initialisations
3273     # ---------------
3274     #
3275     # Opérateurs
3276     Hm = HO["Direct"].appliedControledFormTo
3277     Mm = EM["Direct"].appliedControledFormTo
3278     #
3279     if CM is not None and "Tangent" in CM and U is not None:
3280         Cm = CM["Tangent"].asMatrix(Xb)
3281     else:
3282         Cm = None
3283     #
3284     def Un(_step):
3285         if U is not None:
3286             if hasattr(U,"store") and 1<=_step<len(U) :
3287                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3288             elif hasattr(U,"store") and len(U)==1:
3289                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3290             else:
3291                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3292         else:
3293             _Un = None
3294         return _Un
3295     def CmUn(_xn,_un):
3296         if Cm is not None and _un is not None: # Warning: if Cm is also included in M, it is counted twice!
3297             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3298             _CmUn = _Cm * _un
3299         else:
3300             _CmUn = 0.
3301         return _CmUn
3302     #
3303     # Remark: the observations are used starting from time step number 1,
3304     # and are stored in Yo as ordered by these indices.
3305     # Step 0 is therefore not used, since the first stage starts
3306     # with the observation of step 1.
3307     #
3308     # Number of steps identical to the number of observation steps
3309     if hasattr(Y,"stepnumber"):
3310         duration = Y.stepnumber()
3311     else:
3312         duration = 2
3313     #
3314     # Précalcul des inversions de B et R
3315     BI = B.getI()
3316     RI = R.getI()
3317     #
3318     # Point de démarrage de l'optimisation
3319     Xini = selfA._parameters["InitializationPoint"]
3320     #
3321     # Définition de la fonction-coût
3322     # ------------------------------
3323     selfA.DirectCalculation = [None,] # Step 0 is not observed
3324     selfA.DirectInnovation  = [None,] # Step 0 is not observed
3325     def CostFunction(x):
3326         _X  = numpy.asmatrix(numpy.ravel( x )).T
3327         if selfA._parameters["StoreInternalVariables"] or \
3328             selfA._toStore("CurrentState") or \
3329             selfA._toStore("CurrentOptimum"):
3330             selfA.StoredVariables["CurrentState"].store( _X )
3331         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3332         selfA.DirectCalculation = [None,]
3333         selfA.DirectInnovation  = [None,]
3334         Jo  = 0.
3335         _Xn = _X
3336         for step in range(0,duration-1):
3337             if hasattr(Y,"store"):
3338                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3339             else:
3340                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3341             _Un = Un(step)
3342             #
3343             # Etape d'évolution
3344             if selfA._parameters["EstimationOf"] == "State":
3345                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3346             elif selfA._parameters["EstimationOf"] == "Parameters":
3347                 pass
3348             #
3349             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3350                 _Xn = ApplyBounds( _Xn, ForceNumericBounds(selfA._parameters["Bounds"]) )
3351             #
3352             # Etape de différence aux observations
3353             if selfA._parameters["EstimationOf"] == "State":
3354                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3355             elif selfA._parameters["EstimationOf"] == "Parameters":
3356                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3357             #
3358             # Stockage de l'état
3359             selfA.DirectCalculation.append( _Xn )
3360             selfA.DirectInnovation.append( _YmHMX )
3361             #
3362             # Ajout dans la fonctionnelle d'observation
3363             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3364         J = Jb + Jo
3365         #
3366         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3367         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3368         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3369         selfA.StoredVariables["CostFunctionJ" ].store( J )
3370         if selfA._toStore("IndexOfOptimum") or \
3371             selfA._toStore("CurrentOptimum") or \
3372             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3373             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3374             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3375             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3376         if selfA._toStore("IndexOfOptimum"):
3377             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3378         if selfA._toStore("CurrentOptimum"):
3379             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3380         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3381             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3382         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3383             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3384         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3385             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3386         return J
3387     #
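    # The gradient of J is obtained by a single backward sweep with the adjoint
    # operators: starting from the last step, the weighted residuals
    # R^{-1} (y_k - H(x_k)) are mapped back by H^T and accumulated through M^T,
    # so that GradJ = B^{-1} (x - xb) - sum_k M_{0->k}^T H^T R^{-1} (y_k - H(x_k)).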
3388     def GradientOfCostFunction(x):
3389         _X      = numpy.asmatrix(numpy.ravel( x )).T
3390         GradJb  = BI * (_X - Xb)
3391         GradJo  = 0.
3392         for step in range(duration-1,0,-1):
3393             # Retrieve the last stored evolution state
3394             _Xn = selfA.DirectCalculation.pop()
3395             # Retrieve the last stored innovation
3396             _YmHMX = selfA.DirectInnovation.pop()
3397             # Computation of the adjoint operators
3398             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3399             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3400             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3401             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3402             # Gradient computation by adjoint state
3403             GradJo = GradJo + Ha * RI * _YmHMX # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3404             GradJo = Ma * GradJo               # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3405         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3406         return GradJ
3407     #
3408     # Minimisation de la fonctionnelle
3409     # --------------------------------
3410     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3411     #
3412     if selfA._parameters["Minimizer"] == "LBFGSB":
3413         if "0.19" <= scipy.version.version <= "1.1.0":
3414             import lbfgsbhlt as optimiseur
3415         else:
3416             import scipy.optimize as optimiseur
3417         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3418             func        = CostFunction,
3419             x0          = Xini,
3420             fprime      = GradientOfCostFunction,
3421             args        = (),
3422             bounds      = selfA._parameters["Bounds"],
3423             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3424             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3425             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3426             iprint      = selfA._parameters["optiprint"],
3427             )
3428         nfeval = Informations['funcalls']
3429         rc     = Informations['warnflag']
3430     elif selfA._parameters["Minimizer"] == "TNC":
3431         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3432             func        = CostFunction,
3433             x0          = Xini,
3434             fprime      = GradientOfCostFunction,
3435             args        = (),
3436             bounds      = selfA._parameters["Bounds"],
3437             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3438             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3439             ftol        = selfA._parameters["CostDecrementTolerance"],
3440             messages    = selfA._parameters["optmessages"],
3441             )
3442     elif selfA._parameters["Minimizer"] == "CG":
3443         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3444             f           = CostFunction,
3445             x0          = Xini,
3446             fprime      = GradientOfCostFunction,
3447             args        = (),
3448             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3449             gtol        = selfA._parameters["GradientNormTolerance"],
3450             disp        = selfA._parameters["optdisp"],
3451             full_output = True,
3452             )
3453     elif selfA._parameters["Minimizer"] == "NCG":
3454         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3455             f           = CostFunction,
3456             x0          = Xini,
3457             fprime      = GradientOfCostFunction,
3458             args        = (),
3459             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3460             avextol     = selfA._parameters["CostDecrementTolerance"],
3461             disp        = selfA._parameters["optdisp"],
3462             full_output = True,
3463             )
3464     elif selfA._parameters["Minimizer"] == "BFGS":
3465         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3466             f           = CostFunction,
3467             x0          = Xini,
3468             fprime      = GradientOfCostFunction,
3469             args        = (),
3470             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3471             gtol        = selfA._parameters["GradientNormTolerance"],
3472             disp        = selfA._parameters["optdisp"],
3473             full_output = True,
3474             )
3475     else:
3476         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3477     #
3478     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3479     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3480     #
3481     # Correction to work around a TNC bug on the returned Minimum
3482     # ------------------------------------------------------------
3483     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3484         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3485     #
3486     # Obtention de l'analyse
3487     # ----------------------
3488     Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
3489     #
3490     selfA.StoredVariables["Analysis"].store( Xa )
3491     #
3492     # Calculs et/ou stockages supplémentaires
3493     # ---------------------------------------
3494     if selfA._toStore("BMA"):
3495         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3496     #
3497     return 0
3498
3499 # ==============================================================================
3500 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3501     """
3502     Standard Kalman Filter
3503     """
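    #
    # Linear(ized) Kalman filter with constant operators evaluated at Xb:
    #   forecast : Xf = M Xn (+ Cm Un),  Pf = M Pn M^T + Q
    #   analysis : K  = Pf H^T (H Pf H^T + R)^{-1}
    #              Xn = Xf + K (Y - H Xf),  Pn = Pf - K H Pf
    # In "Parameters" estimation mode, M = Id and Q = 0 by principle.
    #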
3504     if selfA._parameters["EstimationOf"] == "Parameters":
3505         selfA._parameters["StoreInternalVariables"] = True
3506     #
3507     # Opérateurs
3508     # ----------
3509     Ht = HO["Tangent"].asMatrix(Xb)
3510     Ha = HO["Adjoint"].asMatrix(Xb)
3511     #
3512     if selfA._parameters["EstimationOf"] == "State":
3513         Mt = EM["Tangent"].asMatrix(Xb)
3514         Ma = EM["Adjoint"].asMatrix(Xb)
3515     #
3516     if CM is not None and "Tangent" in CM and U is not None:
3517         Cm = CM["Tangent"].asMatrix(Xb)
3518     else:
3519         Cm = None
3520     #
3521     # Durée d'observation et tailles
3522     if hasattr(Y,"stepnumber"):
3523         duration = Y.stepnumber()
3524         __p = numpy.cumprod(Y.shape())[-1]
3525     else:
3526         duration = 2
3527         __p = numpy.array(Y).size
3528     #
3529     # Précalcul des inversions de B et R
3530     if selfA._parameters["StoreInternalVariables"] \
3531         or selfA._toStore("CostFunctionJ") \
3532         or selfA._toStore("CostFunctionJb") \
3533         or selfA._toStore("CostFunctionJo") \
3534         or selfA._toStore("CurrentOptimum") \
3535         or selfA._toStore("APosterioriCovariance"):
3536         BI = B.getI()
3537         RI = R.getI()
3538     #
3539     __n = Xb.size
3540     #
3541     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3542         Xn = Xb
3543         Pn = B
3544         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3545         selfA.StoredVariables["Analysis"].store( Xb )
3546         if selfA._toStore("APosterioriCovariance"):
3547             if hasattr(B,"asfullmatrix"):
3548                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
3549             else:
3550                 selfA.StoredVariables["APosterioriCovariance"].store( B )
3551         selfA._setInternalState("seed", numpy.random.get_state())
3552     elif selfA._parameters["nextStep"]:
3553         Xn = selfA._getInternalState("Xn")
3554         Pn = selfA._getInternalState("Pn")
3555     #
3556     if selfA._parameters["EstimationOf"] == "Parameters":
3557         XaMin            = Xn
3558         previousJMinimum = numpy.finfo(float).max
3559     #
3560     for step in range(duration-1):
3561         if hasattr(Y,"store"):
3562             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3563         else:
3564             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3565         #
3566         if U is not None:
3567             if hasattr(U,"store") and len(U)>1:
3568                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3569             elif hasattr(U,"store") and len(U)==1:
3570                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3571             else:
3572                 Un = numpy.asmatrix(numpy.ravel( U )).T
3573         else:
3574             Un = None
3575         #
3576         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3577             Xn_predicted = Mt * Xn
3578             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3579                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3580                 Xn_predicted = Xn_predicted + Cm * Un
3581             Pn_predicted = Q + Mt * Pn * Ma
3582         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3583             # --- > By principle, M = Id, Q = 0
3584             Xn_predicted = Xn
3585             Pn_predicted = Pn
3586         #
3587         if selfA._parameters["EstimationOf"] == "State":
3588             HX_predicted = Ht * Xn_predicted
3589             _Innovation  = Ynpu - HX_predicted
3590         elif selfA._parameters["EstimationOf"] == "Parameters":
3591             HX_predicted = Ht * Xn_predicted
3592             _Innovation  = Ynpu - HX_predicted
3593             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3594                 _Innovation = _Innovation - Cm * Un
3595         #
3596         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
3597         Xn = Xn_predicted + Kn * _Innovation
3598         Pn = Pn_predicted - Kn * Ht * Pn_predicted
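        # Note: this is the simple covariance update Pa = (I - K H) Pf; the
        # equivalent Joseph form (I - K H) Pf (I - K H)^T + K R K^T could be used
        # instead if numerical symmetry or positivity became a concern.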
3599         #
3600         Xa = Xn # Pointeurs
3601         #--------------------------
3602         selfA._setInternalState("Xn", Xn)
3603         selfA._setInternalState("Pn", Pn)
3604         #--------------------------
3605         #
3606         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3607         # ---> avec analysis
3608         selfA.StoredVariables["Analysis"].store( Xa )
3609         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3610             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
3611         if selfA._toStore("InnovationAtCurrentAnalysis"):
3612             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3613         # ---> avec current state
3614         if selfA._parameters["StoreInternalVariables"] \
3615             or selfA._toStore("CurrentState"):
3616             selfA.StoredVariables["CurrentState"].store( Xn )
3617         if selfA._toStore("ForecastState"):
3618             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
3619         if selfA._toStore("ForecastCovariance"):
3620             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
3621         if selfA._toStore("BMA"):
3622             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
3623         if selfA._toStore("InnovationAtCurrentState"):
3624             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3625         if selfA._toStore("SimulatedObservationAtCurrentState") \
3626             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3627             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3628         # ---> autres
3629         if selfA._parameters["StoreInternalVariables"] \
3630             or selfA._toStore("CostFunctionJ") \
3631             or selfA._toStore("CostFunctionJb") \
3632             or selfA._toStore("CostFunctionJo") \
3633             or selfA._toStore("CurrentOptimum") \
3634             or selfA._toStore("APosterioriCovariance"):
3635             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3636             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3637             J   = Jb + Jo
3638             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3639             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3640             selfA.StoredVariables["CostFunctionJ" ].store( J )
3641             #
3642             if selfA._toStore("IndexOfOptimum") \
3643                 or selfA._toStore("CurrentOptimum") \
3644                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3645                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3646                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3647                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3648                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3649             if selfA._toStore("IndexOfOptimum"):
3650                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3651             if selfA._toStore("CurrentOptimum"):
3652                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3653             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3654                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3655             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3656                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3657             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3658                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3659             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3660                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3661         if selfA._toStore("APosterioriCovariance"):
3662             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3663         if selfA._parameters["EstimationOf"] == "Parameters" \
3664             and J < previousJMinimum:
3665             previousJMinimum    = J
3666             XaMin               = Xa
3667             if selfA._toStore("APosterioriCovariance"):
3668                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3669     #
3670     # Additional final storage of the optimum in parameter estimation
3671     # ----------------------------------------------------------------
3672     if selfA._parameters["EstimationOf"] == "Parameters":
3673         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3674         selfA.StoredVariables["Analysis"].store( XaMin )
3675         if selfA._toStore("APosterioriCovariance"):
3676             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3677         if selfA._toStore("BMA"):
3678             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3679     #
3680     return 0
3681
3682 # ==============================================================================
3683 def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3684     """
3685     Constrained Unscented Kalman Filter
3686     """
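    #
    # Unscented transform with projection onto the bounds: the scaling parameters
    # give Lambda = Alpha^2 (L + Kappa) - L and Gamma = sqrt(L + Lambda); the
    # 2L+1 sigma points are X and X +/- Gamma * columns of the Cholesky factor of P,
    # weighted by Wm for the means and Wc for the covariances. When ConstrainedBy
    # is "EstimateProjection", the bounds are enforced by projection at each stage.
    #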
3687     if selfA._parameters["EstimationOf"] == "Parameters":
3688         selfA._parameters["StoreInternalVariables"] = True
3689     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
3690     #
3691     L     = Xb.size
3692     Alpha = selfA._parameters["Alpha"]
3693     Beta  = selfA._parameters["Beta"]
3694     if selfA._parameters["Kappa"] == 0:
3695         if selfA._parameters["EstimationOf"] == "State":
3696             Kappa = 0
3697         elif selfA._parameters["EstimationOf"] == "Parameters":
3698             Kappa = 3 - L
3699     else:
3700         Kappa = selfA._parameters["Kappa"]
3701     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3702     Gamma  = math.sqrt( L + Lambda )
3703     #
3704     Ww = []
3705     Ww.append( 0. )
3706     for i in range(2*L):
3707         Ww.append( 1. / (2.*(L + Lambda)) )
3708     #
3709     Wm = numpy.array( Ww )
3710     Wm[0] = Lambda / (L + Lambda)
3711     Wc = numpy.array( Ww )
3712     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
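    # Purely illustrative example of the weights: with L = 3, Alpha = 1., Beta = 2.
    # and Kappa = 0 (state estimation), one gets Lambda = 0, Gamma = sqrt(3),
    # Wm = [0, 1/6, 1/6, 1/6, 1/6, 1/6, 1/6] (summing to 1), and Wc identical to Wm
    # except for Wc[0] = 2.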
3713     #
3714     # Opérateurs
3715     Hm = HO["Direct"].appliedControledFormTo
3716     #
3717     if selfA._parameters["EstimationOf"] == "State":
3718         Mm = EM["Direct"].appliedControledFormTo
3719     #
3720     if CM is not None and "Tangent" in CM and U is not None:
3721         Cm = CM["Tangent"].asMatrix(Xb)
3722     else:
3723         Cm = None
3724     #
3725     # Durée d'observation et tailles
3726     if hasattr(Y,"stepnumber"):
3727         duration = Y.stepnumber()
3728         __p = numpy.cumprod(Y.shape())[-1]
3729     else:
3730         duration = 2
3731         __p = numpy.array(Y).size
3732     #
3733     # Précalcul des inversions de B et R
3734     if selfA._parameters["StoreInternalVariables"] \
3735         or selfA._toStore("CostFunctionJ") \
3736         or selfA._toStore("CostFunctionJb") \
3737         or selfA._toStore("CostFunctionJo") \
3738         or selfA._toStore("CurrentOptimum") \
3739         or selfA._toStore("APosterioriCovariance"):
3740         BI = B.getI()
3741         RI = R.getI()
3742     #
3743     __n = Xb.size
3744     #
3745     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3746         Xn = Xb
3747         if hasattr(B,"asfullmatrix"):
3748             Pn = B.asfullmatrix(__n)
3749         else:
3750             Pn = B
3751         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3752         selfA.StoredVariables["Analysis"].store( Xb )
3753         if selfA._toStore("APosterioriCovariance"):
3754             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3755     elif selfA._parameters["nextStep"]:
3756         Xn = selfA._getInternalState("Xn")
3757         Pn = selfA._getInternalState("Pn")
3758     #
3759     if selfA._parameters["EstimationOf"] == "Parameters":
3760         XaMin            = Xn
3761         previousJMinimum = numpy.finfo(float).max
3762     #
3763     for step in range(duration-1):
3764         if hasattr(Y,"store"):
3765             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3766         else:
3767             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3768         #
3769         if U is not None:
3770             if hasattr(U,"store") and len(U)>1:
3771                 Un = numpy.ravel( U[step] ).reshape((-1,1))
3772             elif hasattr(U,"store") and len(U)==1:
3773                 Un = numpy.ravel( U[0] ).reshape((-1,1))
3774             else:
3775                 Un = numpy.ravel( U ).reshape((-1,1))
3776         else:
3777             Un = None
3778         #
3779         Pndemi = numpy.linalg.cholesky(Pn)
3780         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
3781         nbSpts = 2*Xn.size+1
3782         #
3783         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3784             for point in range(nbSpts):
3785                 Xnp[:,point] = ApplyBounds( Xnp[:,point], selfA._parameters["Bounds"] )
3786         #
3787         XEtnnp = []
3788         for point in range(nbSpts):
3789             if selfA._parameters["EstimationOf"] == "State":
3790                 XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
3791                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3792                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
3793                     XEtnnpi = XEtnnpi + Cm * Un
3794                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3795                     XEtnnpi = ApplyBounds( XEtnnpi, selfA._parameters["Bounds"] )
3796             elif selfA._parameters["EstimationOf"] == "Parameters":
3797                 # --- > By principle, M = Id, Q = 0
3798                 XEtnnpi = Xnp[:,point]
3799             XEtnnp.append( XEtnnpi )
3800         XEtnnp = numpy.hstack( XEtnnp )
3801         #
3802         Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
3803         #
3804         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3805             Xncm = ApplyBounds( Xncm, selfA._parameters["Bounds"] )
3806         #
3807         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
3808         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
3809         for point in range(nbSpts):
3810             Pnm += Wc[point] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
3811         #
3812         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
3813             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
3814         else:
3815             Pnmdemi = numpy.linalg.cholesky(Pnm)
3816         #
3817         Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
3818         #
3819         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3820             for point in range(nbSpts):
3821                 Xnnp[:,point] = ApplyBounds( Xnnp[:,point], selfA._parameters["Bounds"] )
3822         #
3823         Ynnp = []
3824         for point in range(nbSpts):
3825             if selfA._parameters["EstimationOf"] == "State":
3826                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
3827             elif selfA._parameters["EstimationOf"] == "Parameters":
3828                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
3829             Ynnp.append( Ynnpi )
3830         Ynnp = numpy.hstack( Ynnp )
3831         #
3832         Yncm = numpy.matrix( Ynnp.getA()*numpy.array(Wm) ).sum(axis=1)
3833         #
3834         Pyyn = R
3835         Pxyn = 0.
3836         for point in range(nbSpts):
3837             Pyyn += Wc[point] * (Ynnp[:,point]-Yncm) * (Ynnp[:,point]-Yncm).T
3838             Pxyn += Wc[point] * (Xnnp[:,point]-Xncm) * (Ynnp[:,point]-Yncm).T
3839         #
3840         _Innovation  = Ynpu - Yncm
3841         if selfA._parameters["EstimationOf"] == "Parameters":
3842             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3843                 _Innovation = _Innovation - Cm * Un
3844         #
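        # Unscented gain: cross-covariance between state and observation sigma
        # points, times the inverse of the innovation covariance.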
3845         Kn = Pxyn * Pyyn.I
3846         Xn = Xncm + Kn * _Innovation
3847         Pn = Pnm - Kn * Pyyn * Kn.T
3848         #
3849         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3850             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
3851         #
3852         Xa = Xn # Pointeurs
3853         #--------------------------
3854         selfA._setInternalState("Xn", Xn)
3855         selfA._setInternalState("Pn", Pn)
3856         #--------------------------
3857         #
3858         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3859         # ---> avec analysis
3860         selfA.StoredVariables["Analysis"].store( Xa )
3861         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3862             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
3863         if selfA._toStore("InnovationAtCurrentAnalysis"):
3864             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3865         # ---> avec current state
3866         if selfA._parameters["StoreInternalVariables"] \
3867             or selfA._toStore("CurrentState"):
3868             selfA.StoredVariables["CurrentState"].store( Xn )
3869         if selfA._toStore("ForecastState"):
3870             selfA.StoredVariables["ForecastState"].store( Xncm )
3871         if selfA._toStore("ForecastCovariance"):
3872             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
3873         if selfA._toStore("BMA"):
3874             selfA.StoredVariables["BMA"].store( Xncm - Xa )
3875         if selfA._toStore("InnovationAtCurrentState"):
3876             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3877         if selfA._toStore("SimulatedObservationAtCurrentState") \
3878             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3879             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
3880         # ---> autres
3881         if selfA._parameters["StoreInternalVariables"] \
3882             or selfA._toStore("CostFunctionJ") \
3883             or selfA._toStore("CostFunctionJb") \
3884             or selfA._toStore("CostFunctionJo") \
3885             or selfA._toStore("CurrentOptimum") \
3886             or selfA._toStore("APosterioriCovariance"):
3887             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3888             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3889             J   = Jb + Jo
3890             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3891             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3892             selfA.StoredVariables["CostFunctionJ" ].store( J )
3893             #
3894             if selfA._toStore("IndexOfOptimum") \
3895                 or selfA._toStore("CurrentOptimum") \
3896                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3897                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3898                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3899                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3900                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3901             if selfA._toStore("IndexOfOptimum"):
3902                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3903             if selfA._toStore("CurrentOptimum"):
3904                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3905             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3906                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3907             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3908                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3909             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3910                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3911             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3912                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3913         if selfA._toStore("APosterioriCovariance"):
3914             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3915         if selfA._parameters["EstimationOf"] == "Parameters" \
3916             and J < previousJMinimum:
3917             previousJMinimum    = J
3918             XaMin               = Xa
3919             if selfA._toStore("APosterioriCovariance"):
3920                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3921     #
3922     # Additional final storage of the optimum in parameter estimation
3923     # ----------------------------------------------------------------
3924     if selfA._parameters["EstimationOf"] == "Parameters":
3925         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3926         selfA.StoredVariables["Analysis"].store( XaMin )
3927         if selfA._toStore("APosterioriCovariance"):
3928             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3929         if selfA._toStore("BMA"):
3930             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3931     #
3932     return 0
3933
3934 # ==============================================================================
3935 def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3936     """
3937     Unscented Kalman Filter
3938     """
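    #
    # Same unscented transform as in uckf above (sigma points X and
    # X +/- Gamma * columns of the Cholesky factor of P, with weights Wm and Wc),
    # but without any projection of the states onto bounds.
    #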
3939     if selfA._parameters["EstimationOf"] == "Parameters":
3940         selfA._parameters["StoreInternalVariables"] = True
3941     #
3942     L     = Xb.size
3943     Alpha = selfA._parameters["Alpha"]
3944     Beta  = selfA._parameters["Beta"]
3945     if selfA._parameters["Kappa"] == 0:
3946         if selfA._parameters["EstimationOf"] == "State":
3947             Kappa = 0
3948         elif selfA._parameters["EstimationOf"] == "Parameters":
3949             Kappa = 3 - L
3950     else:
3951         Kappa = selfA._parameters["Kappa"]
3952     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3953     Gamma  = math.sqrt( L + Lambda )
3954     #
3955     Ww = []
3956     Ww.append( 0. )
3957     for i in range(2*L):
3958         Ww.append( 1. / (2.*(L + Lambda)) )
3959     #
3960     Wm = numpy.array( Ww )
3961     Wm[0] = Lambda / (L + Lambda)
3962     Wc = numpy.array( Ww )
3963     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
3964     #
3965     # Opérateurs
3966     Hm = HO["Direct"].appliedControledFormTo
3967     #
3968     if selfA._parameters["EstimationOf"] == "State":
3969         Mm = EM["Direct"].appliedControledFormTo
3970     #
3971     if CM is not None and "Tangent" in CM and U is not None:
3972         Cm = CM["Tangent"].asMatrix(Xb)
3973     else:
3974         Cm = None
3975     #
3976     # Durée d'observation et tailles
3977     if hasattr(Y,"stepnumber"):
3978         duration = Y.stepnumber()
3979         __p = numpy.cumprod(Y.shape())[-1]
3980     else:
3981         duration = 2
3982         __p = numpy.array(Y).size
3983     #
3984     # Précalcul des inversions de B et R
3985     if selfA._parameters["StoreInternalVariables"] \
3986         or selfA._toStore("CostFunctionJ") \
3987         or selfA._toStore("CostFunctionJb") \
3988         or selfA._toStore("CostFunctionJo") \
3989         or selfA._toStore("CurrentOptimum") \
3990         or selfA._toStore("APosterioriCovariance"):
3991         BI = B.getI()
3992         RI = R.getI()
3993     #
3994     __n = Xb.size
3995     #
3996     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3997         Xn = Xb
3998         if hasattr(B,"asfullmatrix"):
3999             Pn = B.asfullmatrix(__n)
4000         else:
4001             Pn = B
4002         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4003         selfA.StoredVariables["Analysis"].store( Xb )
4004         if selfA._toStore("APosterioriCovariance"):
4005             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
4006     elif selfA._parameters["nextStep"]:
4007         Xn = selfA._getInternalState("Xn")
4008         Pn = selfA._getInternalState("Pn")
4009     #
4010     if selfA._parameters["EstimationOf"] == "Parameters":
4011         XaMin            = Xn
4012         previousJMinimum = numpy.finfo(float).max
4013     #
4014     for step in range(duration-1):
4015         if hasattr(Y,"store"):
4016             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
4017         else:
4018             Ynpu = numpy.ravel( Y ).reshape((__p,1))
4019         #
4020         if U is not None:
4021             if hasattr(U,"store") and len(U)>1:
4022                 Un = numpy.ravel( U[step] ).reshape((-1,1))
4023             elif hasattr(U,"store") and len(U)==1:
4024                 Un = numpy.ravel( U[0] ).reshape((-1,1))
4025             else:
4026                 Un = numpy.ravel( U ).reshape((-1,1))
4027         else:
4028             Un = None
4029         #
4030         Pndemi = numpy.linalg.cholesky(Pn)
4031         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
4032         nbSpts = 2*Xn.size+1
4033         #
4034         XEtnnp = []
4035         for point in range(nbSpts):
4036             if selfA._parameters["EstimationOf"] == "State":
4037                 XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
4038                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
4039                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
4040                     XEtnnpi = XEtnnpi + Cm * Un
4041             elif selfA._parameters["EstimationOf"] == "Parameters":
4042                 # --- > By principle, M = Id, Q = 0
4043                 XEtnnpi = Xnp[:,point]
4044             XEtnnp.append( XEtnnpi )
4045         XEtnnp = numpy.hstack( XEtnnp )
4046         #
4047         Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
4048         #
4049         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
4050         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
4051         for point in range(nbSpts):
4052             Pnm += Wc[point] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
4053         #
4054         Pnmdemi = numpy.linalg.cholesky(Pnm)
4055         #
4056         Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
4057         #
4058         Ynnp = []
4059         for point in range(nbSpts):
4060             if selfA._parameters["EstimationOf"] == "State":
4061                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
4062             elif selfA._parameters["EstimationOf"] == "Parameters":
4063                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
4064             Ynnp.append( Ynnpi )
4065         Ynnp = numpy.hstack( Ynnp )
4066         #
4067         Yncm = numpy.matrix( Ynnp.getA()*numpy.array(Wm) ).sum(axis=1)
4068         #
4069         Pyyn = R
4070         Pxyn = 0.
4071         for point in range(nbSpts):
4072             Pyyn += Wc[point] * (Ynnp[:,point]-Yncm) * (Ynnp[:,point]-Yncm).T
4073             Pxyn += Wc[point] * (Xnnp[:,point]-Xncm) * (Ynnp[:,point]-Yncm).T
4074         #
4075         _Innovation  = Ynpu - Yncm
4076         if selfA._parameters["EstimationOf"] == "Parameters":
4077             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
4078                 _Innovation = _Innovation - Cm * Un
4079         #
4080         Kn = Pxyn * Pyyn.I
4081         Xn = Xncm + Kn * _Innovation
4082         Pn = Pnm - Kn * Pyyn * Kn.T
4083         #
4084         Xa = Xn # Pointeurs
4085         #--------------------------
4086         selfA._setInternalState("Xn", Xn)
4087         selfA._setInternalState("Pn", Pn)
4088         #--------------------------
4089         #
4090         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4091         # ---> avec analysis
4092         selfA.StoredVariables["Analysis"].store( Xa )
4093         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
4094             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
4095         if selfA._toStore("InnovationAtCurrentAnalysis"):
4096             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
4097         # ---> avec current state
4098         if selfA._parameters["StoreInternalVariables"] \
4099             or selfA._toStore("CurrentState"):
4100             selfA.StoredVariables["CurrentState"].store( Xn )
4101         if selfA._toStore("ForecastState"):
4102             selfA.StoredVariables["ForecastState"].store( Xncm )
4103         if selfA._toStore("ForecastCovariance"):
4104             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
4105         if selfA._toStore("BMA"):
4106             selfA.StoredVariables["BMA"].store( Xncm - Xa )
4107         if selfA._toStore("InnovationAtCurrentState"):
4108             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4109         if selfA._toStore("SimulatedObservationAtCurrentState") \
4110             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4111             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
4112         # ---> autres
4113         if selfA._parameters["StoreInternalVariables"] \
4114             or selfA._toStore("CostFunctionJ") \
4115             or selfA._toStore("CostFunctionJb") \
4116             or selfA._toStore("CostFunctionJo") \
4117             or selfA._toStore("CurrentOptimum") \
4118             or selfA._toStore("APosterioriCovariance"):
4119             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
4120             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
4121             J   = Jb + Jo
4122             selfA.StoredVariables["CostFunctionJb"].store( Jb )
4123             selfA.StoredVariables["CostFunctionJo"].store( Jo )
4124             selfA.StoredVariables["CostFunctionJ" ].store( J )
4125             #
4126             if selfA._toStore("IndexOfOptimum") \
4127                 or selfA._toStore("CurrentOptimum") \
4128                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
4129                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
4130                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
4131                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4132                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4133             if selfA._toStore("IndexOfOptimum"):
4134                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4135             if selfA._toStore("CurrentOptimum"):
4136                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
4137             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4138                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
4139             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4140                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4141             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4142                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4143             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4144                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4145         if selfA._toStore("APosterioriCovariance"):
4146             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
4147         if selfA._parameters["EstimationOf"] == "Parameters" \
4148             and J < previousJMinimum:
4149             previousJMinimum    = J
4150             XaMin               = Xa
4151             if selfA._toStore("APosterioriCovariance"):
4152                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
4153     #
4154     # Additional final storage of the optimum in parameter estimation
4155     # ----------------------------------------------------------------
4156     if selfA._parameters["EstimationOf"] == "Parameters":
4157         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4158         selfA.StoredVariables["Analysis"].store( XaMin )
4159         if selfA._toStore("APosterioriCovariance"):
4160             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
4161         if selfA._toStore("BMA"):
4162             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
4163     #
4164     return 0
4165
4166 # ==============================================================================
4167 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
4168     """
4169     3DVAR variational analysis with no inversion of B
4170     """
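    #
    # Change of control variable v such that x = xb + B v: the background term of
    # the cost function becomes Jb(v) = 1/2 v^T B v, so that no inverse of B is
    # required:
    #   J(v) = 1/2 v^T B v + 1/2 [y - H(xb + B v)]^T R^{-1} [y - H(xb + B v)]
    # The optimization starts from v = 0, i.e. from the background xb.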
4171     #
4172     # Initialisations
4173     # ---------------
4174     #
4175     # Opérateurs
4176     Hm = HO["Direct"].appliedTo
4177     Ha = HO["Adjoint"].appliedInXTo
4178     #
4179     # Précalcul des inversions de B et R
4180     BT = B.getT()
4181     RI = R.getI()
4182     #
4183     # Point de démarrage de l'optimisation
4184     Xini = numpy.zeros(Xb.shape)
4185     #
4186     # Définition de la fonction-coût
4187     # ------------------------------
4188     def CostFunction(v):
4189         _V = numpy.asmatrix(numpy.ravel( v )).T
4190         _X = Xb + B * _V
4191         if selfA._parameters["StoreInternalVariables"] or \
4192             selfA._toStore("CurrentState") or \
4193             selfA._toStore("CurrentOptimum"):
4194             selfA.StoredVariables["CurrentState"].store( _X )
4195         _HX = Hm( _X )
4196         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
4197         _Innovation = Y - _HX
4198         if selfA._toStore("SimulatedObservationAtCurrentState") or \
4199             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4200             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
4201         if selfA._toStore("InnovationAtCurrentState"):
4202             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4203         #
4204         Jb  = float( 0.5 * _V.T * BT * _V )
4205         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
4206         J   = Jb + Jo
4207         #
4208         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
4209         selfA.StoredVariables["CostFunctionJb"].store( Jb )
4210         selfA.StoredVariables["CostFunctionJo"].store( Jo )
4211         selfA.StoredVariables["CostFunctionJ" ].store( J )
4212         if selfA._toStore("IndexOfOptimum") or \
4213             selfA._toStore("CurrentOptimum") or \
4214             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
4215             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
4216             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
4217             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4218             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4219         if selfA._toStore("IndexOfOptimum"):
4220             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4221         if selfA._toStore("CurrentOptimum"):
4222             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
4223         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4224             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
4225         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4226             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4227         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4228             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4229         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4230             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4231         return J
4232     #
4233     def GradientOfCostFunction(v):
4234         _V = numpy.asmatrix(numpy.ravel( v )).T
4235         _X = Xb + B * _V
4236         _HX     = Hm( _X )
4237         _HX     = numpy.asmatrix(numpy.ravel( _HX )).T
4238         GradJb  = BT * _V
4239         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
4240         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
4241         return GradJ
4242     #
4243     # Minimisation de la fonctionnelle
4244     # --------------------------------
4245     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
4246     #
4247     if selfA._parameters["Minimizer"] == "LBFGSB":
4248         if "0.19" <= scipy.version.version <= "1.1.0":
4249             import lbfgsbhlt as optimiseur
4250         else:
4251             import scipy.optimize as optimiseur
4252         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
4253             func        = CostFunction,
4254             x0          = Xini,
4255             fprime      = GradientOfCostFunction,
4256             args        = (),
4257             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4258             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
4259             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
4260             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4261             iprint      = selfA._parameters["optiprint"],
4262             )
4263         nfeval = Informations['funcalls']
4264         rc     = Informations['warnflag']
4265     elif selfA._parameters["Minimizer"] == "TNC":
4266         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
4267             func        = CostFunction,
4268             x0          = Xini,
4269             fprime      = GradientOfCostFunction,
4270             args        = (),
4271             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4272             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
4273             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4274             ftol        = selfA._parameters["CostDecrementTolerance"],
4275             messages    = selfA._parameters["optmessages"],
4276             )
4277     elif selfA._parameters["Minimizer"] == "CG":
4278         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
4279             f           = CostFunction,
4280             x0          = Xini,
4281             fprime      = GradientOfCostFunction,
4282             args        = (),
4283             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4284             gtol        = selfA._parameters["GradientNormTolerance"],
4285             disp        = selfA._parameters["optdisp"],
4286             full_output = True,
4287             )
4288     elif selfA._parameters["Minimizer"] == "NCG":
4289         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
4290             f           = CostFunction,
4291             x0          = Xini,
4292             fprime      = GradientOfCostFunction,
4293             args        = (),
4294             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4295             avextol     = selfA._parameters["CostDecrementTolerance"],
4296             disp        = selfA._parameters["optdisp"],
4297             full_output = True,
4298             )
4299     elif selfA._parameters["Minimizer"] == "BFGS":
4300         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
4301             f           = CostFunction,
4302             x0          = Xini,
4303             fprime      = GradientOfCostFunction,
4304             args        = (),
4305             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4306             gtol        = selfA._parameters["GradientNormTolerance"],
4307             disp        = selfA._parameters["optdisp"],
4308             full_output = True,
4309             )
4310     else:
4311         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
4312     #
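    # Index and value of the best cost among the evaluations performed during
    # this minimization (the nbPreviousSteps older ones are ignored)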
4313     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4314     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
4315     #
4316     # Workaround for a TNC bug affecting the returned Minimum
4317     # --------------------------------------------------------
4318     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
4319         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
4320         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
4321     else:
4322         Minimum = Xb + B * numpy.asmatrix(numpy.ravel( Minimum )).T
4323     #
4324     # Retrieval of the analysis
4325     # -------------------------
4326     Xa = Minimum
4327     #
4328     selfA.StoredVariables["Analysis"].store( Xa )
4329     #
4330     if selfA._toStore("OMA") or \
4331         selfA._toStore("SigmaObs2") or \
4332         selfA._toStore("SimulationQuantiles") or \
4333         selfA._toStore("SimulatedObservationAtOptimum"):
4334         if selfA._toStore("SimulatedObservationAtCurrentState"):
4335             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
4336         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4337             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
4338         else:
4339             HXa = Hm( Xa )
4340     #
4341     # Computation of the analysis error covariance
4342     # ---------------------------------------------
4343     if selfA._toStore("APosterioriCovariance") or \
4344         selfA._toStore("SimulationQuantiles") or \
4345         selfA._toStore("JacobianMatrixAtOptimum") or \
4346         selfA._toStore("KalmanGainAtOptimum"):
4347         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
4348         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
4349     if selfA._toStore("APosterioriCovariance") or \
4350         selfA._toStore("SimulationQuantiles") or \
4351         selfA._toStore("KalmanGainAtOptimum"):
4352         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
4353         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
4354     if selfA._toStore("APosterioriCovariance") or \
4355         selfA._toStore("SimulationQuantiles"):
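        # The a posteriori covariance A is built as the inverse of the Hessian
        # of J at the optimum, assembled column by column from the action of
        # B^{-1} and of the adjoint/tangent operators on each basis vector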
4356         BI = B.getI()
4357         HessienneI = []
4358         nb = Xa.size
4359         for i in range(nb):
4360             _ee    = numpy.matrix(numpy.zeros(nb)).T
4361             _ee[i] = 1.
4362             _HtEE  = numpy.dot(HtM,_ee)
4363             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
4364             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
4365         HessienneI = numpy.matrix( HessienneI )
4366         A = HessienneI.I
4367         if min(A.shape) != max(A.shape):
4368             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
4369         if (numpy.diag(A) < 0).any():
4370             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
4371         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
4372             try:
4373                 L = numpy.linalg.cholesky( A )
4374             except numpy.linalg.LinAlgError:
4375                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
4376     if selfA._toStore("APosterioriCovariance"):
4377         selfA.StoredVariables["APosterioriCovariance"].store( A )
4378     if selfA._toStore("JacobianMatrixAtOptimum"):
4379         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
4380     if selfA._toStore("KalmanGainAtOptimum"):
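        # Two algebraically equivalent forms of the gain: the first inverts a
        # matrix of observation-space size, the second a matrix of state-space
        # size, the cheaper one being selected according to the problem sizes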
4381         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
4382         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
4383         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
4384     #
4385     # Additional calculations and/or storage
4386     # ---------------------------------------
4387     if selfA._toStore("Innovation") or \
4388         selfA._toStore("SigmaObs2") or \
4389         selfA._toStore("MahalanobisConsistency") or \
4390         selfA._toStore("OMB"):
4391         d  = Y - HXb
4392     if selfA._toStore("Innovation"):
4393         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
4394     if selfA._toStore("BMA"):
4395         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
4396     if selfA._toStore("OMA"):
4397         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
4398     if selfA._toStore("OMB"):
4399         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
4400     if selfA._toStore("SigmaObs2"):
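        # Observation variance consistency indicator: scalar product of the
        # innovation with the observation-minus-analysis residual, normalized
        # by the trace of R (akin to a Desroziers-type diagnostic)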
4401         TraceR = R.trace(Y.size)
4402         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
4403     if selfA._toStore("MahalanobisConsistency"):
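        # Consistency indicator 2*J(xa)/p, where p is the number of
        # observations; it is classically expected to be of order 1 when the
        # error statistics are consistent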
4404         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
4405     if selfA._toStore("SimulationQuantiles"):
4406         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
4407     if selfA._toStore("SimulatedObservationAtBackground"):
4408         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
4409     if selfA._toStore("SimulatedObservationAtOptimum"):
4410         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
4411     #
4412     return 0
4413
4414 # ==============================================================================
4415 if __name__ == "__main__":
4416     print('\n AUTODIAGNOSTIC\n')